semantic-link-labs 0.8.2-py3-none-any.whl → 0.8.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic.

Files changed (108)
  1. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/METADATA +37 -8
  2. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/RECORD +108 -104
  3. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +38 -0
  5. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
  6. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
  7. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
  8. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
  9. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
  10. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
  11. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
  12. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
  13. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
  14. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
  15. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
  16. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
  17. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
  18. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
  19. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
  20. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
  21. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
  22. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
  23. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
  24. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
  25. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
  26. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
  27. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
  28. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
  29. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
  30. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
  31. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
  32. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
  33. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
  34. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
  35. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
  36. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
  37. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
  38. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
  39. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
  40. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
  41. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
  42. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
  43. sempy_labs/_capacities.py +138 -25
  44. sempy_labs/_capacity_migration.py +161 -60
  45. sempy_labs/_clear_cache.py +3 -3
  46. sempy_labs/_data_pipelines.py +54 -0
  47. sempy_labs/_dataflows.py +4 -0
  48. sempy_labs/_deployment_pipelines.py +13 -7
  49. sempy_labs/_environments.py +6 -0
  50. sempy_labs/_eventhouses.py +6 -0
  51. sempy_labs/_eventstreams.py +6 -0
  52. sempy_labs/_external_data_shares.py +190 -0
  53. sempy_labs/_generate_semantic_model.py +26 -4
  54. sempy_labs/_git.py +15 -15
  55. sempy_labs/_helper_functions.py +186 -11
  56. sempy_labs/_icons.py +55 -22
  57. sempy_labs/_kql_databases.py +6 -0
  58. sempy_labs/_kql_querysets.py +6 -0
  59. sempy_labs/_list_functions.py +6 -3
  60. sempy_labs/_managed_private_endpoints.py +166 -0
  61. sempy_labs/_mirrored_warehouses.py +2 -0
  62. sempy_labs/_ml_experiments.py +6 -0
  63. sempy_labs/_ml_models.py +6 -0
  64. sempy_labs/_model_bpa.py +11 -6
  65. sempy_labs/_model_bpa_bulk.py +14 -30
  66. sempy_labs/_model_bpa_rules.py +8 -3
  67. sempy_labs/_notebooks.py +111 -15
  68. sempy_labs/_query_scale_out.py +8 -6
  69. sempy_labs/_refresh_semantic_model.py +299 -49
  70. sempy_labs/_spark.py +12 -5
  71. sempy_labs/_sql.py +2 -2
  72. sempy_labs/_translations.py +16 -14
  73. sempy_labs/_vertipaq.py +127 -116
  74. sempy_labs/_warehouses.py +90 -1
  75. sempy_labs/_workloads.py +128 -0
  76. sempy_labs/_workspace_identity.py +4 -4
  77. sempy_labs/_workspaces.py +14 -1
  78. sempy_labs/admin/__init__.py +2 -0
  79. sempy_labs/admin/_basic_functions.py +203 -58
  80. sempy_labs/admin/_domains.py +18 -18
  81. sempy_labs/directlake/__init__.py +2 -0
  82. sempy_labs/directlake/_directlake_schema_sync.py +2 -6
  83. sempy_labs/directlake/_dl_helper.py +4 -1
  84. sempy_labs/directlake/_generate_shared_expression.py +1 -1
  85. sempy_labs/directlake/_get_shared_expression.py +7 -1
  86. sempy_labs/directlake/_guardrails.py +3 -2
  87. sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -8
  88. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
  89. sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
  90. sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
  91. sempy_labs/lakehouse/_shortcuts.py +4 -0
  92. sempy_labs/migration/_create_pqt_file.py +2 -2
  93. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
  94. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
  95. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
  96. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
  97. sempy_labs/migration/_migration_validation.py +2 -0
  98. sempy_labs/migration/_refresh_calc_tables.py +1 -0
  99. sempy_labs/report/__init__.py +4 -1
  100. sempy_labs/report/_generate_report.py +16 -14
  101. sempy_labs/report/_paginated.py +74 -0
  102. sempy_labs/report/_report_bpa.py +8 -10
  103. sempy_labs/report/_report_functions.py +19 -19
  104. sempy_labs/report/_report_rebind.py +6 -1
  105. sempy_labs/report/_reportwrapper.py +3 -3
  106. sempy_labs/tom/_model.py +173 -67
  107. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/LICENSE +0 -0
  108. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/top_level.txt +0 -0
sempy_labs/_helper_functions.py CHANGED
@@ -13,6 +13,8 @@ from uuid import UUID
  import sempy_labs._icons as icons
  import urllib.parse
  from azure.core.credentials import TokenCredential, AccessToken
+ import numpy as np
+ from IPython.display import display, HTML


  def create_abfss_path(
@@ -961,15 +963,15 @@ class FabricTokenCredential(TokenCredential):
          **kwargs: any,
      ) -> AccessToken:

-         from notebookutils import mssparkutils
+         import notebookutils

-         token = mssparkutils.credentials.getToken(scopes)
+         token = notebookutils.credentials.getToken(scopes)
          access_token = AccessToken(token, 0)

          return access_token


- def get_adls_client(account_name):
+ def _get_adls_client(account_name):

      from azure.storage.filedatalake import DataLakeServiceClient

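Throughout this release, the deprecated mssparkutils entry point is replaced with notebookutils, which exposes the same credential helpers directly. A minimal sketch of the new call pattern inside a Fabric notebook (the audience URL is illustrative):

    import notebookutils  # available inside Microsoft Fabric notebook sessions

    # 0.8.2 style: from notebookutils import mssparkutils; mssparkutils.credentials.getToken(...)
    # 0.8.4 style: call the credentials helpers on notebookutils directly.
    token = notebookutils.credentials.getToken("https://analysis.windows.net/powerbi/api")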
@@ -1017,19 +1019,22 @@ def get_language_codes(languages: str | List[str]):
      return languages


- def get_azure_token_credentials(
+ def _get_azure_token_credentials(
      key_vault_uri: str,
      key_vault_tenant_id: str,
      key_vault_client_id: str,
      key_vault_client_secret: str,
+     audience: str = "https://management.azure.com/.default",
  ) -> Tuple[str, str, dict]:

-     from notebookutils import mssparkutils
+     import notebookutils
      from azure.identity import ClientSecretCredential

-     tenant_id = mssparkutils.credentials.getSecret(key_vault_uri, key_vault_tenant_id)
-     client_id = mssparkutils.credentials.getSecret(key_vault_uri, key_vault_client_id)
-     client_secret = mssparkutils.credentials.getSecret(
+     # "https://analysis.windows.net/powerbi/api/.default"
+
+     tenant_id = notebookutils.credentials.getSecret(key_vault_uri, key_vault_tenant_id)
+     client_id = notebookutils.credentials.getSecret(key_vault_uri, key_vault_client_id)
+     client_secret = notebookutils.credentials.getSecret(
          key_vault_uri, key_vault_client_secret
      )

@@ -1037,7 +1042,7 @@ def get_azure_token_credentials(
          tenant_id=tenant_id, client_id=client_id, client_secret=client_secret
      )

-     token = credential.get_token("https://management.azure.com/.default").token
+     token = credential.get_token(audience).token

      headers = {
          "Authorization": f"Bearer {token}",
@@ -1077,7 +1082,7 @@ def resolve_environment_id(environment: str, workspace: Optional[str] = None) ->
      )


- def make_clickable(val):
+ def _make_clickable(val):

      return f'<a target="_blank" href="{val}">{val}</a>'

@@ -1132,7 +1137,7 @@ def generate_guid():
      return str(uuid.uuid4())


- def get_max_run_id(lakehouse: str, table_name: str) -> int:
+ def _get_max_run_id(lakehouse: str, table_name: str) -> int:

      from pyspark.sql import SparkSession

@@ -1142,3 +1147,173 @@ def get_max_run_id(lakehouse: str, table_name: str) -> int:
      max_run_id = dfSpark.collect()[0][0]

      return max_run_id
+
+
+ def _make_list_unique(my_list):
+
+     return list(set(my_list))
+
+
+ def _get_partition_map(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
+
+     if workspace is None:
+         workspace = fabric.resolve_workspace_name()
+
+     partitions = fabric.evaluate_dax(
+         dataset=dataset,
+         workspace=workspace,
+         dax_string="""
+         select [ID] AS [PartitionID], [TableID], [Name] AS [PartitionName] from $system.tmschema_partitions
+         """,
+     )
+
+     tables = fabric.evaluate_dax(
+         dataset=dataset,
+         workspace=workspace,
+         dax_string="""
+         select [ID] AS [TableID], [Name] AS [TableName] from $system.tmschema_tables
+         """,
+     )
+
+     partition_map = pd.merge(partitions, tables, on="TableID", how="left")
+     partition_map["PartitionID"] = partition_map["PartitionID"].astype(str)
+     partition_counts = partition_map.groupby("TableID")["PartitionID"].transform(
+         "count"
+     )
+     partition_map["Object Name"] = partition_map.apply(
+         lambda row: (
+             f"'{row['TableName']}'[{row['PartitionName']}]"
+             if partition_counts[row.name] > 1
+             else row["TableName"]
+         ),
+         axis=1,
+     )
+     return partition_map
+
+
+ def _show_chart(spec, title):
+
+     h = f"""
+     <!DOCTYPE html>
+     <html>
+     <head>
+         <script src="https://cdn.jsdelivr.net/npm/vega@5"></script>
+         <script src="https://cdn.jsdelivr.net/npm/vega-lite@5"></script>
+         <script src="https://cdn.jsdelivr.net/npm/vega-embed@6"></script>
+         <style>
+             table, th, td {{
+                 border: 10px solid #e7e9eb;
+                 border-collapse: collapse;
+             }}
+         </style>
+     </head>
+     <body>
+         <table>
+             <tr>
+                 <td style="text-align: center;">
+                     <h1>{title}</h1>
+                 </td>
+             </tr>
+             <tr>
+                 <td>
+                     <div id="vis"></div>
+                 </td>
+             </tr>
+         </table>
+         <script type="text/javascript">
+             var spec = {spec};
+             var opt = {{"renderer": "canvas", "actions": false}};
+             vegaEmbed("#vis", spec, opt);
+         </script>
+     </body>
+     </html>"""
+
+     display(HTML(h))
+
+
+ def _process_and_display_chart(df, title, widget):
+
+     # Convert time columns to milliseconds
+     df["Start"] = df["Start Time"].astype(np.int64) / int(1e6)
+     df["End"] = df["End Time"].astype(np.int64) / int(1e6)
+
+     # Calculate the time offset for proper Gantt chart rendering
+     Offset = min(df["Start"])
+     df["Start"] = df["Start"] - Offset
+     df["End"] = df["End"] - Offset
+
+     # Vega-Lite spec for Gantt chart
+     spec = (
+         """{
+         "$schema": "https://vega.github.io/schema/vega-lite/v5.json",
+         "description": "A simple bar chart with ranged data (aka Gantt Chart).",
+         "data": { "values": """
+         + df.to_json(orient="records")
+         + """ },
+         "width": 700,
+         "height": 400,
+         "mark": "bar",
+         "encoding": {
+             "y": {
+                 "field": "Object Name",
+                 "type": "ordinal",
+                 "axis": {
+                     "labelFontSize": 15,
+                     "titleFontSize": 20,
+                     "title": "Object"
+                 }
+             },
+             "x": {
+                 "field": "Start",
+                 "type": "quantitative",
+                 "title": "milliseconds",
+                 "axis": {
+                     "titleFontSize": 20
+                 }
+             },
+             "x2": {"field": "End"},
+             "color": {
+                 "field": "Event Subclass",
+                 "scale": {
+                     "domain": ["Process", "ExecuteSql"],
+                     "range": ["#FFC000","#0070C0"]
+                 },
+                 "legend": {
+                     "labelFontSize": 20,
+                     "titleFontSize": 20,
+                     "title": "Event Type"
+                 }
+             },
+             "tooltip": [
+                 {"field": "Duration", "type": "quantitative", "format": ","},
+                 {"field": "Cpu Time", "type": "quantitative", "format": ","},
+                 {"field": "Event Subclass", "type": "nominal"}
+             ]
+         }
+     }"""
+     )
+
+     with widget:
+         widget.clear_output(wait=True)
+         _show_chart(spec, title=title)
+
+
+ def _convert_data_type(input_data_type: str) -> str:
+
+     data_type_mapping = {
+         "string": "String",
+         "int": "Int64",
+         "tinyint": "Int64",
+         "smallint": "Int64",
+         "bigint": "Int64",
+         "boolean": "Boolean",
+         "timestamp": "DateTime",
+         "date": "DateTime",
+         "double": "Double",
+         "float": "Double",
+     }
+
+     if "decimal" in input_data_type:
+         return "Decimal"
+     else:
+         return data_type_mapping.get(input_data_type)
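The removed icons.data_type_mapping (see the _icons.py diff below) matched only the literal keys "decimal(38,18)" and "decimal(19,4)", so any other decimal precision fell through; the new _convert_data_type helper treats every decimal type uniformly. A minimal sketch of the behavior, importing the private helper as named in this diff (private APIs may change without notice):

    from sempy_labs._helper_functions import _convert_data_type

    print(_convert_data_type("decimal(10,2)"))  # "Decimal" - any precision/scale now matches
    print(_convert_data_type("timestamp"))      # "DateTime"
    print(_convert_data_type("varchar"))        # None - unmapped Spark types return None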
sempy_labs/_icons.py CHANGED
@@ -10,20 +10,6 @@ bullet = "\u2022"
  warning = "⚠️"
  error = "\u274C"
  info = "ℹ️"
- data_type_mapping = {
-     "string": "String",
-     "int": "Int64",
-     "tinyint": "Int64",
-     "smallint": "Int64",
-     "bigint": "Int64",
-     "boolean": "Boolean",
-     "timestamp": "DateTime",
-     "date": "DateTime",
-     "decimal(38,18)": "Decimal",
-     "decimal(19,4)": "Decimal",
-     "double": "Double",
-     "float": "Double",
- }
  measure_icon = "\u2211"
  table_icon = "\u229E"
  column_icon = "\u229F"
@@ -93,14 +79,15 @@ sku_mapping = {
      "P4": "F512",
      "P5": "F1024",
  }
- refreshTypes = [
-     "full",
-     "automatic",
-     "dataOnly",
-     "calculate",
-     "clearValues",
-     "defragment",
- ]
+
+ refresh_type_mapping = {
+     "full": "full",
+     "auto": "automatic",
+     "data": "dataOnly",
+     "calc": "calculate",
+     "clear": "clearValues",
+     "defrag": "defragment",
+ }

  itemTypes = {
      "DataPipeline": "dataPipelines",
@@ -114,3 +101,49 @@ itemTypes = {
      "Warehouse": "warehouses",
  }
  default_schema = "dbo"
+
+ data_type_string = "string"
+ data_type_long = "long"
+ data_type_timestamp = "timestamp"
+ data_type_double = "double"
+ data_type_bool = "bool"
+ int_format = "int"
+ pct_format = "pct"
+ no_format = ""
+
+ bpa_schema = {
+     "Capacity Name": data_type_string,
+     "Capacity Id": data_type_string,
+     "Workspace Name": data_type_string,
+     "Workspace Id": data_type_string,
+     "Dataset Name": data_type_string,
+     "Dataset Id": data_type_string,
+     "Configured By": data_type_string,
+     "Rule Name": data_type_string,
+     "Category": data_type_string,
+     "Severity": data_type_string,
+     "Object Type": data_type_string,
+     "Object Name": data_type_string,
+     "Description": data_type_string,
+     "URL": data_type_string,
+     "RunId": data_type_long,
+     "Timestamp": data_type_timestamp,
+ }
+
+ sll_ann_name = "PBI_ProTooling"
+ sll_prefix = "SLL_"
+ sll_tags = []
+ base_cols = ["EventClass", "EventSubclass", "CurrentTime", "TextData"]
+ end_cols = base_cols + [
+     "StartTime",
+     "EndTime",
+     "Duration",
+     "CpuTime",
+     "Success",
+     "IntegerData",
+     "ObjectID",
+ ]
+ refresh_event_schema = {
+     "JobGraph": base_cols,
+     "ProgressReportEnd": end_cols,
+ }
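Replacing the flat refreshTypes list with refresh_type_mapping lets callers pass short aliases ("auto", "calc", "defrag") that resolve to the full TMSL refresh types. How _refresh_semantic_model.py consumes the mapping is outside this hunk; a plausible normalization sketch under that assumption:

    import sempy_labs._icons as icons

    def resolve_refresh_type(refresh_type: str = "full") -> str:
        # Map a short alias to its canonical TMSL refresh type; full names pass through.
        resolved = icons.refresh_type_mapping.get(refresh_type, refresh_type)
        if resolved not in icons.refresh_type_mapping.values():
            raise ValueError(f"'{refresh_type}' is not a valid refresh type.")
        return resolved

    resolve_refresh_type("auto")      # "automatic"
    resolve_refresh_type("dataOnly")  # passes through unchanged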
sempy_labs/_kql_databases.py CHANGED
@@ -14,6 +14,8 @@ def list_kql_databases(workspace: Optional[str] = None) -> pd.DataFrame:
      """
      Shows the KQL databases within a workspace.

+     This is a wrapper function for the following API: `Items - List KQL Databases <https://learn.microsoft.com/rest/api/fabric/kqldatabase/items/list-kql-databases>`_.
+
      Parameters
      ----------
      workspace : str, default=None
@@ -72,6 +74,8 @@ def create_kql_database(
      """
      Creates a KQL database.

+     This is a wrapper function for the following API: `Items - Create KQL Database <https://learn.microsoft.com/rest/api/fabric/kqldatabase/items/create-kql-database>`_.
+
      Parameters
      ----------
      name: str
@@ -107,6 +111,8 @@ def delete_kql_database(name: str, workspace: Optional[str] = None):
      """
      Deletes a KQL database.

+     This is a wrapper function for the following API: `Items - Delete KQL Database <https://learn.microsoft.com/rest/api/fabric/kqldatabase/items/delete-kql-database>`_.
+
      Parameters
      ----------
      name: str
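These docstring additions (and the matching ones in the KQL queryset, mirrored warehouse, ML experiment, and ML model modules below) document each function as a thin wrapper over a single Fabric REST endpoint. A minimal usage sketch for the KQL database trio, importing from the module shown in this diff; the database and workspace names are illustrative:

    from sempy_labs._kql_databases import (
        create_kql_database,
        list_kql_databases,
        delete_kql_database,
    )

    create_kql_database(name="SalesTelemetry", workspace="Analytics")
    print(list_kql_databases(workspace="Analytics"))  # one row per KQL database
    delete_kql_database(name="SalesTelemetry", workspace="Analytics")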
sempy_labs/_kql_querysets.py CHANGED
@@ -14,6 +14,8 @@ def list_kql_querysets(workspace: Optional[str] = None) -> pd.DataFrame:
      """
      Shows the KQL querysets within a workspace.

+     This is a wrapper function for the following API: `Items - List KQL Querysets <https://learn.microsoft.com/rest/api/fabric/kqlqueryset/items/list-kql-querysets>`_.
+
      Parameters
      ----------
      workspace : str, default=None
@@ -62,6 +64,8 @@ def create_kql_queryset(
      """
      Creates a KQL queryset.

+     This is a wrapper function for the following API: `Items - Create KQL Queryset <https://learn.microsoft.com/rest/api/fabric/kqlqueryset/items/create-kql-queryset>`_.
+
      Parameters
      ----------
      name: str
@@ -97,6 +101,8 @@ def delete_kql_queryset(name: str, workspace: Optional[str] = None):
      """
      Deletes a KQL queryset.

+     This is a wrapper function for the following API: `Items - Delete KQL Queryset <https://learn.microsoft.com/rest/api/fabric/kqlqueryset/items/delete-kql-queryset>`_.
+
      Parameters
      ----------
      name: str
sempy_labs/_list_functions.py CHANGED
@@ -1487,7 +1487,7 @@ def list_semantic_model_object_report_usage(
      is sorted descending by 'Report Usage Count'.
      """

-     from sempy_labs._model_dependencies import get_measure_dependencies
+     from sempy_labs._model_dependencies import get_model_calc_dependencies
      from sempy_labs._helper_functions import format_dax_object_name

      workspace = fabric.resolve_workspace_name(workspace)
@@ -1503,7 +1503,7 @@ def list_semantic_model_object_report_usage(
          )
      else:
          df = pd.DataFrame(columns=["Table Name", "Object Name", "Object Type"])
-     dep = get_measure_dependencies(dataset=dataset, workspace=workspace)
+     dep = get_model_calc_dependencies(dataset=dataset, workspace=workspace)

      for i, r in dfR.iterrows():
          object_type = r["Object Type"]
@@ -1515,7 +1515,10 @@ def list_semantic_model_object_report_usage(
              "Object Type": object_type,
          }
          df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-         if object_type == "Measure":
+         df["Object Type"] = df["Object Type"].replace(
+             "Attribute Hierarchy", "Column"
+         )
+         if object_type in ["Measure", "Calc Column", "Calc Table", "Hierarchy"]:
              df_filt = dep[dep["Object Name"] == object_name][
                  ["Referenced Table", "Referenced Object", "Referenced Object Type"]
              ]
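Previously only measures had their dependencies expanded, via get_measure_dependencies; switching to get_model_calc_dependencies lets the function also chase calculated columns, calculated tables, and hierarchies, with "Attribute Hierarchy" rows relabeled as "Column". A minimal call sketch (dataset and workspace names are illustrative):

    from sempy_labs._list_functions import list_semantic_model_object_report_usage

    df = list_semantic_model_object_report_usage(dataset="Sales Model", workspace="Analytics")
    # Usage counts now also reflect objects reached through calc columns/tables and hierarchies.
    print(df.head())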
sempy_labs/_managed_private_endpoints.py ADDED
@@ -0,0 +1,166 @@
+ import sempy.fabric as fabric
+ import pandas as pd
+ import sempy_labs._icons as icons
+ from typing import Optional
+ from sempy_labs._helper_functions import (
+     resolve_workspace_name_and_id,
+     lro,
+     pagination,
+ )
+ from sempy.fabric.exceptions import FabricHTTPException
+ from uuid import UUID
+
+
+ def create_managed_private_endpoint(
+     name: str,
+     target_private_link_resource_id: UUID,
+     target_subresource_type: str,
+     request_message: Optional[str] = None,
+     workspace: Optional[str] = None,
+ ):
+     """
+     Creates a managed private endpoint.
+
+     This is a wrapper function for the following API: `Managed Private Endpoints - Create Workspace Managed Private Endpoint <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/create-workspace-managed-private-endpoint>`_.
+
+     Parameters
+     ----------
+     name: str
+         Name of the managed private endpoint.
+     target_private_link_resource_id: UUID
+         Resource Id of data source for which private endpoint needs to be created.
+     target_subresource_type : str
+         Sub-resource pointing to Private-link resource.
+     request_message : str, default=None
+         Message to approve private endpoint request. Should not be more than 140 characters.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     request_body = {
+         "name": name,
+         "targetPrivateLinkResourceId": target_private_link_resource_id,
+         "targetSubresourceType": target_subresource_type,
+     }
+
+     if request_message is not None:
+         if len(request_message) > 140:
+             raise ValueError(
+                 f"{icons.red_dot} The request message cannot be more than 140 characters."
+             )
+         request_body["requestMessage"] = request_message
+
+     client = fabric.FabricRestClient()
+     response = client.post(
+         f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints", json=request_body
+     )
+
+     lro(client, response, status_codes=[201, 202])
+
+     print(
+         f"{icons.green_dot} The '{name}' managed private endpoint has been created within the '{workspace}' workspace."
+     )
+
+
+ def list_managed_private_endpoints(workspace: Optional[str] = None) -> pd.DataFrame:
+     """
+     Shows the managed private endpoints within a workspace.
+
+     This is a wrapper function for the following API: `Managed Private Endpoints - List Workspace Managed Private Endpoints <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/list-workspace-managed-private-endpoints>`_.
+
+     Parameters
+     ----------
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the managed private endpoints within a workspace.
+     """
+
+     df = pd.DataFrame(
+         columns=[
+             "Managed Private Endpoint Name",
+             "Managed Private Endpoint Id",
+             "Target Private Link Resource Id",
+             "Provisioning State",
+             "Connection Status",
+             "Connection Description",
+             "Target Subresource Type",
+         ]
+     )
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     client = fabric.FabricRestClient()
+     response = client.get(f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints")
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     responses = pagination(client, response)
+
+     for r in responses:
+         for v in r.get("value", []):
+             conn = v.get("connectionState", {})
+             new_data = {
+                 "Managed Private Endpoint Name": v.get("name"),
+                 "Managed Private Endpoint Id": v.get("id"),
+                 "Target Private Link Resource Id": v.get("targetPrivateLinkResourceId"),
+                 "Provisioning State": v.get("provisioningState"),
+                 "Connection Status": conn.get("status"),
+                 "Connection Description": conn.get("description"),
+                 "Target Subresource Type": v.get("targetSubresourceType"),
+             }
+             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     return df
+
+
+ def delete_managed_private_endpoint(
+     managed_private_endpoint: str, workspace: Optional[str] = None
+ ):
+     """
+     Deletes a Fabric managed private endpoint.
+
+     This is a wrapper function for the following API: `Managed Private Endpoints - Delete Workspace Managed Private Endpoint <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/delete-workspace-managed-private-endpoint>`_.
+
+     Parameters
+     ----------
+     managed_private_endpoint: str
+         Name of the managed private endpoint.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     df = list_managed_private_endpoints(workspace=workspace)
+     df_filt = df[df["Managed Private Endpoint Name"] == managed_private_endpoint]
+
+     if len(df_filt) == 0:
+         raise ValueError(
+             f"{icons.red_dot} The '{managed_private_endpoint}' managed private endpoint does not exist within the '{workspace}' workspace."
+         )
+
+     item_id = df_filt["Managed Private Endpoint Id"].iloc[0]
+
+     client = fabric.FabricRestClient()
+     response = client.delete(
+         f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints/{item_id}"
+     )
+
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     print(
+         f"{icons.green_dot} The '{managed_private_endpoint}' managed private endpoint within the '{workspace}' workspace has been deleted."
+     )
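A minimal end-to-end sketch of the three new functions; the workspace name, endpoint name, and target resource below are illustrative. Note that despite the UUID annotation, target_private_link_resource_id is passed through to the request body unchanged, and per the docstring it is the resource Id of the data source:

    from sempy_labs._managed_private_endpoints import (
        create_managed_private_endpoint,
        list_managed_private_endpoints,
        delete_managed_private_endpoint,
    )

    create_managed_private_endpoint(
        name="mpe-sales-sql",
        target_private_link_resource_id=(
            "/subscriptions/<sub-id>/resourceGroups/<rg>"
            "/providers/Microsoft.Sql/servers/<server>"  # placeholder resource Id
        ),
        target_subresource_type="sqlServer",
        request_message="Please approve for the Analytics workspace.",
        workspace="Analytics",
    )

    print(list_managed_private_endpoints(workspace="Analytics"))
    delete_managed_private_endpoint(managed_private_endpoint="mpe-sales-sql", workspace="Analytics")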
sempy_labs/_mirrored_warehouses.py CHANGED
@@ -12,6 +12,8 @@ def list_mirrored_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
      """
      Shows the mirrored warehouses within a workspace.

+     This is a wrapper function for the following API: `Items - List Mirrored Warehouses <https://learn.microsoft.com/rest/api/fabric/mirroredwarehouse/items/list-mirrored-warehouses>`_.
+
      Parameters
      ----------
      workspace : str, default=None
sempy_labs/_ml_experiments.py CHANGED
@@ -14,6 +14,8 @@ def list_ml_experiments(workspace: Optional[str] = None) -> pd.DataFrame:
      """
      Shows the ML experiments within a workspace.

+     This is a wrapper function for the following API: `Items - List ML Experiments <https://learn.microsoft.com/rest/api/fabric/mlexperiment/items/list-ml-experiments>`_.
+
      Parameters
      ----------
      workspace : str, default=None
@@ -60,6 +62,8 @@ def create_ml_experiment(
      """
      Creates a Fabric ML experiment.

+     This is a wrapper function for the following API: `Items - Create ML Experiment <https://learn.microsoft.com/rest/api/fabric/mlexperiment/items/create-ml-experiment>`_.
+
      Parameters
      ----------
      name: str
@@ -95,6 +99,8 @@ def delete_ml_experiment(name: str, workspace: Optional[str] = None):
      """
      Deletes a Fabric ML experiment.

+     This is a wrapper function for the following API: `Items - Delete ML Experiment <https://learn.microsoft.com/rest/api/fabric/mlexperiment/items/delete-ml-experiment>`_.
+
      Parameters
      ----------
      name: str
sempy_labs/_ml_models.py CHANGED
@@ -14,6 +14,8 @@ def list_ml_models(workspace: Optional[str] = None) -> pd.DataFrame:
      """
      Shows the ML models within a workspace.

+     This is a wrapper function for the following API: `Items - List ML Models <https://learn.microsoft.com/rest/api/fabric/mlmodel/items/list-ml-models>`_.
+
      Parameters
      ----------
      workspace : str, default=None
@@ -60,6 +62,8 @@ def create_ml_model(
      """
      Creates a Fabric ML model.

+     This is a wrapper function for the following API: `Items - Create ML Model <https://learn.microsoft.com/rest/api/fabric/mlmodel/items/create-ml-model>`_.
+
      Parameters
      ----------
      name: str
@@ -93,6 +97,8 @@ def delete_ml_model(name: str, workspace: Optional[str] = None):
      """
      Deletes a Fabric ML model.

+     This is a wrapper function for the following API: `Items - Delete ML Model <https://learn.microsoft.com/rest/api/fabric/mlmodel/items/delete-ml-model>`_.
+
      Parameters
      ----------
      name: str