semantic-link-labs 0.8.9__py3-none-any.whl → 0.8.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +5 -2
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +76 -75
- sempy_labs/__init__.py +14 -2
- sempy_labs/_authentication.py +31 -2
- sempy_labs/_clear_cache.py +39 -37
- sempy_labs/_connections.py +13 -13
- sempy_labs/_data_pipelines.py +20 -20
- sempy_labs/_dataflows.py +27 -28
- sempy_labs/_dax.py +41 -47
- sempy_labs/_environments.py +26 -23
- sempy_labs/_eventhouses.py +16 -15
- sempy_labs/_eventstreams.py +16 -15
- sempy_labs/_external_data_shares.py +18 -20
- sempy_labs/_gateways.py +57 -11
- sempy_labs/_generate_semantic_model.py +100 -71
- sempy_labs/_git.py +134 -67
- sempy_labs/_helper_functions.py +199 -145
- sempy_labs/_job_scheduler.py +92 -0
- sempy_labs/_kql_databases.py +16 -15
- sempy_labs/_kql_querysets.py +16 -15
- sempy_labs/_list_functions.py +281 -120
- sempy_labs/_managed_private_endpoints.py +19 -17
- sempy_labs/_mirrored_databases.py +51 -48
- sempy_labs/_mirrored_warehouses.py +5 -4
- sempy_labs/_ml_experiments.py +16 -15
- sempy_labs/_ml_models.py +15 -14
- sempy_labs/_model_bpa.py +27 -25
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +60 -28
- sempy_labs/_notebooks.py +73 -39
- sempy_labs/_one_lake_integration.py +23 -26
- sempy_labs/_query_scale_out.py +67 -64
- sempy_labs/_refresh_semantic_model.py +47 -42
- sempy_labs/_spark.py +33 -32
- sempy_labs/_sql.py +12 -9
- sempy_labs/_translations.py +10 -7
- sempy_labs/_vertipaq.py +34 -31
- sempy_labs/_warehouses.py +22 -21
- sempy_labs/_workspace_identity.py +11 -10
- sempy_labs/_workspaces.py +40 -33
- sempy_labs/admin/__init__.py +4 -0
- sempy_labs/admin/_basic_functions.py +44 -12
- sempy_labs/admin/_external_data_share.py +3 -3
- sempy_labs/admin/_items.py +4 -4
- sempy_labs/admin/_scanner.py +7 -5
- sempy_labs/directlake/_directlake_schema_compare.py +18 -14
- sempy_labs/directlake/_directlake_schema_sync.py +18 -12
- sempy_labs/directlake/_dl_helper.py +36 -32
- sempy_labs/directlake/_generate_shared_expression.py +10 -9
- sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
- sempy_labs/directlake/_get_shared_expression.py +4 -3
- sempy_labs/directlake/_guardrails.py +12 -6
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
- sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
- sempy_labs/directlake/_warm_cache.py +87 -65
- sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
- sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
- sempy_labs/lakehouse/_lakehouse.py +17 -13
- sempy_labs/lakehouse/_shortcuts.py +42 -23
- sempy_labs/migration/_create_pqt_file.py +16 -11
- sempy_labs/migration/_refresh_calc_tables.py +16 -10
- sempy_labs/report/_download_report.py +9 -8
- sempy_labs/report/_generate_report.py +40 -44
- sempy_labs/report/_paginated.py +9 -9
- sempy_labs/report/_report_bpa.py +13 -9
- sempy_labs/report/_report_functions.py +80 -91
- sempy_labs/report/_report_helper.py +8 -4
- sempy_labs/report/_report_list_functions.py +24 -13
- sempy_labs/report/_report_rebind.py +17 -16
- sempy_labs/report/_reportwrapper.py +41 -33
- sempy_labs/tom/_model.py +117 -38
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
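
The change that cuts across nearly every module above is a widening of the `dataset` and `workspace` parameters from `str` to `str | UUID`: callers can now pass either a display name or an object GUID, and the existing `resolve_workspace_name_and_id` helper plus the new `resolve_dataset_name_and_id` helper normalize whichever form they receive. The full diff of `sempy_labs/_list_functions.py` below shows the pattern in detail. A minimal sketch of the new calling convention inside a Fabric notebook, with `list_tables` standing in for any of the updated functions (the model name, workspace name, and GUIDs are placeholders):

    import sempy_labs as labs
    from uuid import UUID

    # By name, exactly as in 0.8.9:
    tables = labs.list_tables(dataset="Sales Model", workspace="Analytics")

    # By ID, new in 0.8.11; useful when display names are ambiguous or unknown:
    tables = labs.list_tables(
        dataset=UUID("00000000-0000-0000-0000-000000000000"),    # placeholder model GUID
        workspace=UUID("00000000-0000-0000-0000-000000000001"),  # placeholder workspace GUID
    )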
sempy_labs/_list_functions.py
CHANGED
@@ -3,29 +3,30 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     create_relationship_name,
     resolve_lakehouse_id,
-    resolve_dataset_id,
     pagination,
     resolve_item_type,
     format_dax_object_name,
+    resolve_dataset_name_and_id,
 )
 import pandas as pd
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
 def get_object_level_security(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows the object level security for the semantic model.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -37,12 +38,13 @@ def get_object_level_security(
 
     from sempy_labs.tom import connect_semantic_model
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     df = pd.DataFrame(columns=["Role Name", "Object Type", "Table Name", "Object Name"])
 
     with connect_semantic_model(
-        dataset=dataset, readonly=True, workspace=workspace
+        dataset=dataset_id, readonly=True, workspace=workspace_id
     ) as tom:
 
         for r in tom.model.Roles:
@@ -82,17 +84,17 @@ def get_object_level_security(
 
 
 def list_tables(
-    dataset: str, workspace: Optional[str] = None, extended: bool = False
+    dataset: str | UUID, workspace: Optional[str | UUID] = None, extended: bool = False
 ) -> pd.DataFrame:
     """
     Shows a semantic model's tables and their properties.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     extended : bool, default=False
@@ -106,7 +108,8 @@ def list_tables(
 
     from sempy_labs.tom import connect_semantic_model
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     df = pd.DataFrame(
         columns=[
@@ -121,20 +124,20 @@ def list_tables(
     )
 
     with connect_semantic_model(
-        dataset=dataset, workspace=workspace, readonly=True
+        dataset=dataset_id, workspace=workspace_id, readonly=True
     ) as tom:
         if extended:
             dict_df = fabric.evaluate_dax(
-                dataset=dataset,
-                workspace=workspace,
+                dataset=dataset_id,
+                workspace=workspace_id,
                 dax_string="""
                 EVALUATE SELECTCOLUMNS(FILTER(INFO.STORAGETABLECOLUMNS(), [COLUMN_TYPE] = "BASIC_DATA"),[DIMENSION_NAME],[DICTIONARY_SIZE])
                 """,
             )
             dict_sum = dict_df.groupby("[DIMENSION_NAME]")["[DICTIONARY_SIZE]"].sum()
             data = fabric.evaluate_dax(
-                dataset=dataset,
-                workspace=workspace,
+                dataset=dataset_id,
+                workspace=workspace_id,
                 dax_string="""EVALUATE SELECTCOLUMNS(INFO.STORAGETABLECOLUMNSEGMENTS(),[TABLE_ID],[DIMENSION_NAME],[USED_SIZE])""",
             )
             data_sum = (
@@ -162,8 +165,8 @@ def list_tables(
                 .sum()
             )
             rc = fabric.evaluate_dax(
-                dataset=dataset,
-                workspace=workspace,
+                dataset=dataset_id,
+                workspace=workspace_id,
                 dax_string="""
                 SELECT [DIMENSION_NAME],[ROWS_COUNT] FROM $SYSTEM.DISCOVER_STORAGE_TABLES
                 WHERE RIGHT ( LEFT ( TABLE_ID, 2 ), 1 ) <> '$'
@@ -254,16 +257,18 @@ def list_tables(
     return df
 
 
-def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
+def list_annotations(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
     """
     Shows a semantic model's annotations and their properties.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -275,7 +280,8 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
 
     from sempy_labs.tom import connect_semantic_model
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     df = pd.DataFrame(
         columns=[
@@ -288,7 +294,7 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
     )
 
     with connect_semantic_model(
-        dataset=dataset, readonly=True, workspace=workspace
+        dataset=dataset_id, readonly=True, workspace=workspace_id
     ) as tom:
 
         mName = tom.model.Name
@@ -485,8 +491,8 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
 
 
 def list_columns(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     lakehouse: Optional[str] = None,
     lakehouse_workspace: Optional[str] = None,
 ) -> pd.DataFrame:
@@ -495,10 +501,10 @@ def list_columns(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     lakehouse : str, default=None
@@ -519,20 +525,21 @@ def list_columns(
     )
     from pyspark.sql import SparkSession
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
-    dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
+    dfP = fabric.list_partitions(dataset=dataset_id, workspace=workspace_id)
 
     isDirectLake = any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows())
 
-    dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
+    dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
 
     if isDirectLake:
         dfC["Column Cardinality"] = None
         sql_statements = []
         (lakeID, lakeName) = get_direct_lake_lakehouse(
-            dataset=dataset,
-            workspace=workspace,
+            dataset=dataset_id,
+            workspace=workspace_id,
             lakehouse=lakehouse,
             lakehouse_workspace=lakehouse_workspace,
         )
@@ -586,14 +593,14 @@ def list_columns(
     return dfC
 
 
-def list_dashboards(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_dashboards(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows a list of the dashboards within a workspace.
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -616,11 +623,7 @@ def list_dashboards(workspace: Optional[str] = None) -> pd.DataFrame:
         ]
     )
 
-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resovle_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.PowerBIRestClient()
     response = client.get(f"/v1.0/myorg/groups/{workspace_id}/dashboards")
@@ -635,8 +638,8 @@ def list_dashboards(workspace: Optional[str] = None) -> pd.DataFrame:
             "Web URL": v.get("webUrl"),
             "Embed URL": v.get("embedUrl"),
             "Data Classification": v.get("dataClassification"),
-            "Users":
-            "Subscriptions":
+            "Users": v.get("users"),
+            "Subscriptions": v.get("subscriptions"),
         }
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
@@ -645,14 +648,14 @@ def list_dashboards(workspace: Optional[str] = None) -> pd.DataFrame:
     return df
 
 
-def list_lakehouses(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_lakehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the lakehouses within a workspace.
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -675,7 +678,7 @@ def list_lakehouses(workspace: Optional[str] = None) -> pd.DataFrame:
         ]
     )
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/lakehouses")
@@ -705,14 +708,14 @@ def list_lakehouses(workspace: Optional[str] = None) -> pd.DataFrame:
     return df
 
 
-def list_sql_endpoints(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_sql_endpoints(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the SQL endpoints within a workspace.
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -724,7 +727,7 @@ def list_sql_endpoints(workspace: Optional[str] = None) -> pd.DataFrame:
 
     df = pd.DataFrame(columns=["SQL Endpoint Id", "SQL Endpoint Name", "Description"])
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/sqlEndpoints")
@@ -746,14 +749,14 @@ def list_sql_endpoints(workspace: Optional[str] = None) -> pd.DataFrame:
     return df
 
 
-def list_datamarts(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_datamarts(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the datamarts within a workspace.
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -765,7 +768,7 @@ def list_datamarts(workspace: Optional[str] = None) -> pd.DataFrame:
 
     df = pd.DataFrame(columns=["Datamart Name", "Datamart ID", "Description"])
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/datamarts")
@@ -791,7 +794,7 @@ def update_item(
     current_name: str,
     new_name: str,
     description: Optional[str] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Updates the name/description of a Fabric item.
@@ -806,13 +809,13 @@ def update_item(
         The new name of the item.
     description : str, default=None
         A description of the item.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     item_type = item_type.replace(" ", "").capitalize()
 
     if item_type not in icons.itemTypes.keys():
@@ -822,12 +825,12 @@ def update_item(
 
     itemType = icons.itemTypes[item_type]
 
-    dfI = fabric.list_items(workspace=workspace, type=item_type)
+    dfI = fabric.list_items(workspace=workspace_id, type=item_type)
    dfI_filt = dfI[(dfI["Display Name"] == current_name)]
 
     if len(dfI_filt) == 0:
         raise ValueError(
-            f"{icons.red_dot} The '{current_name}' {item_type} does not exist within the '{workspace}' workspace."
+            f"{icons.red_dot} The '{current_name}' {item_type} does not exist within the '{workspace_name}' workspace."
         )
 
     itemId = dfI_filt["Id"].iloc[0]
@@ -845,26 +848,26 @@ def update_item(
         raise FabricHTTPException(response)
     if description is None:
         print(
-            f"{icons.green_dot} The '{current_name}' {item_type} within the '{workspace}' workspace has been updated to be named '{new_name}'"
+            f"{icons.green_dot} The '{current_name}' {item_type} within the '{workspace_name}' workspace has been updated to be named '{new_name}'"
         )
     else:
         print(
-            f"{icons.green_dot} The '{current_name}' {item_type} within the '{workspace}' workspace has been updated to be named '{new_name}' and have a description of '{description}'"
+            f"{icons.green_dot} The '{current_name}' {item_type} within the '{workspace_name}' workspace has been updated to be named '{new_name}' and have a description of '{description}'"
         )
 
 
 def list_relationships(
-    dataset: str, workspace: Optional[str] = None, extended: bool = False
+    dataset: str | UUID, workspace: Optional[str | UUID] = None, extended: bool = False
 ) -> pd.DataFrame:
     """
     Shows a semantic model's relationships and their properties.
 
     Parameters
     ----------
-    dataset: str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset: str | uuid.UUID
+        Name or UUID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     extended : bool, default=False
@@ -876,17 +879,18 @@ def list_relationships(
         A pandas dataframe showing the object level security for the semantic model.
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
-    dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
+    dfR = fabric.list_relationships(dataset=dataset_id, workspace=workspace_id)
     dfR["From Object"] = format_dax_object_name(dfR["From Table"], dfR["From Column"])
     dfR["To Object"] = format_dax_object_name(dfR["To Table"], dfR["To Column"])
 
     if extended:
         # Used to map the Relationship IDs
         rel = fabric.evaluate_dax(
-            dataset=dataset,
-            workspace=workspace,
+            dataset=dataset_id,
+            workspace=workspace_id,
             dax_string="""
             SELECT
             [ID] AS [RelationshipID]
@@ -897,8 +901,8 @@ def list_relationships(
 
         # USED_SIZE shows the Relationship Size where TABLE_ID starts with R$
         cs = fabric.evaluate_dax(
-            dataset=dataset,
-            workspace=workspace,
+            dataset=dataset_id,
+            workspace=workspace_id,
             dax_string="""
             SELECT
             [TABLE_ID]
@@ -936,16 +940,18 @@ def list_relationships(
     return dfR
 
 
-def list_kpis(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
+def list_kpis(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
     """
     Shows a semantic model's KPIs and their properties.
 
     Parameters
     ----------
-    dataset: str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset: str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -957,8 +963,11 @@ def list_kpis(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
 
     from sempy_labs.tom import connect_semantic_model
 
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
     with connect_semantic_model(
-        dataset=dataset, workspace=workspace, readonly=True
+        dataset=dataset_id, workspace=workspace_id, readonly=True
     ) as tom:
 
         df = pd.DataFrame(
@@ -1001,17 +1010,17 @@ def list_kpis(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
 
 
 def list_semantic_model_objects(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows a list of semantic model objects.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -1165,7 +1174,7 @@ def list_semantic_model_objects(
 
 
 def list_shortcuts(
-    lakehouse: Optional[str] = None, workspace: Optional[str] = None
+    lakehouse: Optional[str] = None, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows all shortcuts which exist in a Fabric lakehouse and their properties.
@@ -1175,8 +1184,8 @@ def list_shortcuts(
     lakehouse : str, default=None
         The Fabric lakehouse name.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-    workspace : str, default=None
-        The name of the Fabric workspace in which lakehouse resides.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace in which lakehouse resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -1186,12 +1195,12 @@ def list_shortcuts(
         A pandas dataframe showing all the shortcuts which exist in the specified lakehouse.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
     else:
-        lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
+        lakehouse_id = resolve_lakehouse_id(lakehouse, workspace_id)
 
     client = fabric.FabricRestClient()
 
@@ -1308,17 +1317,17 @@ def list_capacities() -> pd.DataFrame:
 
 
 def list_reports_using_semantic_model(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows a list of all the reports (in all workspaces) which use a given semantic model.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -1337,8 +1346,9 @@ def list_reports_using_semantic_model(
         ]
     )
 
-    workspace = fabric.resolve_workspace_name(workspace)
-    dataset_id = resolve_dataset_id(dataset, workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
     client = fabric.PowerBIRestClient()
     response = client.get(
         f"metadata/relations/downstream/dataset/{dataset_id}?apiVersion=3"
@@ -1365,7 +1375,7 @@ def list_reports_using_semantic_model(
 
 
 def list_report_semantic_model_objects(
-    dataset: str, workspace: Optional[str] = None, extended: bool = False
+    dataset: str | UUID, workspace: Optional[str | UUID] = None, extended: bool = False
 ) -> pd.DataFrame:
     """
     Shows a list of semantic model objects (i.e. columns, measures, hierarchies) used in all reports which feed data from
@@ -1375,10 +1385,10 @@ def list_report_semantic_model_objects(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     extended: bool, default=False
@@ -1407,8 +1417,11 @@ def list_report_semantic_model_objects(
         ]
     )
 
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
     # Collect all reports which use the semantic model
-    dfR = list_reports_using_semantic_model(dataset=dataset, workspace=workspace)
+    dfR = list_reports_using_semantic_model(dataset=dataset_id, workspace=workspace_id)
 
     if len(dfR) == 0:
         return dfRO
@@ -1432,7 +1445,7 @@ def list_report_semantic_model_objects(
     # Collect all semantic model objects
     if extended:
         with connect_semantic_model(
-            dataset=dataset, readonly=True, workspace=workspace
+            dataset=dataset_id, readonly=True, workspace=workspace_id
         ) as tom:
             for index, row in dfRO.iterrows():
                 object_type = row["Object Type"]
@@ -1457,8 +1470,8 @@ def list_report_semantic_model_objects(
 
 
 def list_semantic_model_object_report_usage(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     include_dependencies: bool = False,
     extended: bool = False,
 ) -> pd.DataFrame:
@@ -1469,10 +1482,10 @@ def list_semantic_model_object_report_usage(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     include_dependencies : bool, default=False
@@ -1490,9 +1503,10 @@ def list_semantic_model_object_report_usage(
     from sempy_labs._model_dependencies import get_model_calc_dependencies
     from sempy_labs._helper_functions import format_dax_object_name
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
-    dfR = list_report_semantic_model_objects(dataset=dataset, workspace=workspace)
+    dfR = list_report_semantic_model_objects(dataset=dataset_id, workspace=workspace_id)
     usage_column_name = "Report Usage Count"
 
     if not include_dependencies:
@@ -1503,7 +1517,7 @@ def list_semantic_model_object_report_usage(
         )
     else:
         df = pd.DataFrame(columns=["Table Name", "Object Name", "Object Type"])
-        dep = get_model_calc_dependencies(dataset=dataset, workspace=workspace)
+        dep = get_model_calc_dependencies(dataset=dataset_id, workspace=workspace_id)
 
     for i, r in dfR.iterrows():
         object_type = r["Object Type"]
@@ -1543,7 +1557,9 @@ def list_semantic_model_object_report_usage(
         final_df["Object"] = format_dax_object_name(
             final_df["Table Name"], final_df["Object Name"]
         )
-        dfC = fabric.list_columns(dataset=dataset, workspace=workspace, extended=True)
+        dfC = fabric.list_columns(
+            dataset=dataset_id, workspace=workspace_id, extended=True
+        )
         dfC["Object"] = format_dax_object_name(dfC["Table Name"], dfC["Column Name"])
         final_df = pd.merge(
             final_df,
@@ -1574,3 +1590,148 @@ def list_semantic_model_object_report_usage(
     final_df.reset_index(drop=True, inplace=True)
 
     return final_df
+
+
+def list_server_properties(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Lists the `properties <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.serverproperty?view=analysisservices-dotnet>`_ of the Analysis Services instance.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of the server properties.
+    """
+
+    tom_server = fabric.create_tom_server(readonly=True, workspace=workspace)
+
+    rows = [
+        {
+            "Name": sp.Name,
+            "Value": sp.Value,
+            "Default Value": sp.DefaultValue,
+            "Is Read Only": sp.IsReadOnly,
+            "Requires Restart": sp.RequiresRestart,
+            "Units": sp.Units,
+            "Category": sp.Category,
+        }
+        for sp in tom_server.ServerProperties
+    ]
+
+    tom_server.Dispose()
+    df = pd.DataFrame(rows)
+
+    bool_cols = ["Is Read Only", "Requires Restart"]
+    df[bool_cols] = df[bool_cols].astype(bool)
+
+    return df
+
+
+def list_semantic_model_errors(
+    dataset: str | UUID, workspace: Optional[str | UUID]
+) -> pd.DataFrame:
+    """
+    Shows a list of a semantic model's errors and their error messages (if they exist).
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of the errors and error messages for a given semantic model.
+    """
+
+    from sempy_labs.tom import connect_semantic_model
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+        dataset, workspace=workspace_id
+    )
+
+    error_rows = []
+
+    with connect_semantic_model(
+        dataset=dataset_id, workspace=workspace_id, readonly=True
+    ) as tom:
+        # Define mappings of TOM objects to object types and attributes
+        error_checks = [
+            ("Column", tom.all_columns, lambda o: o.ErrorMessage),
+            ("Partition", tom.all_partitions, lambda o: o.ErrorMessage),
+            (
+                "Partition - Data Coverage Expression",
+                tom.all_partitions,
+                lambda o: (
+                    o.DataCoverageDefinition.ErrorMessage
+                    if o.DataCoverageDefinition
+                    else ""
+                ),
+            ),
+            ("Row Level Security", tom.all_rls, lambda o: o.ErrorMessage),
+            ("Calculation Item", tom.all_calculation_items, lambda o: o.ErrorMessage),
+            ("Measure", tom.all_measures, lambda o: o.ErrorMessage),
+            (
+                "Measure - Detail Rows Expression",
+                tom.all_measures,
+                lambda o: (
+                    o.DetailRowsDefinition.ErrorMessage
+                    if o.DetailRowsDefinition
+                    else ""
+                ),
+            ),
+            (
+                "Measure - Format String Expression",
+                tom.all_measures,
+                lambda o: (
+                    o.FormatStringDefinition.ErrorMessage
+                    if o.FormatStringDefinition
+                    else ""
+                ),
+            ),
+            (
+                "Calculation Group - Multiple or Empty Selection Expression",
+                tom.all_calculation_groups,
+                lambda o: (
+                    o.CalculationGroup.MultipleOrEmptySelectionExpression.ErrorMessage
+                    if o.CalculationGroup.MultipleOrEmptySelectionExpression
+                    else ""
+                ),
+            ),
+            (
+                "Calculation Group - No Selection Expression",
+                tom.all_calculation_groups,
+                lambda o: (
+                    o.CalculationGroup.NoSelectionExpression.ErrorMessage
+                    if o.CalculationGroup.NoSelectionExpression
+                    else ""
+                ),
+            ),
+        ]
+
+        # Iterate over all error checks
+        for object_type, getter, error_extractor in error_checks:
+            for obj in getter():
+                error_message = error_extractor(obj)
+                if error_message:  # Only add rows if there's an error message
+                    error_rows.append(
+                        {
+                            "Object Type": object_type,
+                            "Table Name": obj.Parent.Name,
+                            "Object Name": obj.Name,
+                            "Error Message": error_message,
+                        }
+                    )
+
+    return pd.DataFrame(error_rows)
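
The tail of the diff introduces two new public functions, `list_server_properties` and `list_semantic_model_errors`. A usage sketch, assuming both are re-exported from the package root like the other functions in this module (the workspace and model names are placeholders; the output columns are as defined in the code above):

    import sempy_labs as labs

    # Inspect the Analysis Services server properties behind a workspace.
    props = labs.list_server_properties(workspace="Analytics")
    print(props[["Name", "Value", "Default Value", "Requires Restart"]].head())

    # Collect error messages attached to columns, partitions, measures, RLS
    # expressions, calculation items and calculation groups in a model. Note
    # that as released, `workspace` has no default value in the signature
    # (despite the docstring), so it must be passed explicitly.
    errors = labs.list_semantic_model_errors(dataset="Sales Model", workspace="Analytics")
    if errors.empty:
        print("No errors found in the model.")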