semantic-link-labs 0.8.3__py3-none-any.whl → 0.8.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (109) hide show
  1. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/METADATA +38 -8
  2. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/RECORD +109 -104
  3. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +53 -1
  5. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
  6. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
  7. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
  8. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
  9. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
  10. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
  11. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
  12. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
  13. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
  14. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
  15. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
  16. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
  17. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
  18. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
  19. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
  20. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
  21. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
  22. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
  23. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
  24. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
  25. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
  26. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
  27. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
  28. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
  29. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
  30. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
  31. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
  32. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
  33. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
  34. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
  35. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
  36. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
  37. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
  38. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
  39. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
  40. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
  41. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
  42. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
  43. sempy_labs/_capacities.py +49 -14
  44. sempy_labs/_capacity_migration.py +1 -7
  45. sempy_labs/_data_pipelines.py +6 -0
  46. sempy_labs/_dataflows.py +118 -1
  47. sempy_labs/_dax.py +189 -3
  48. sempy_labs/_deployment_pipelines.py +13 -7
  49. sempy_labs/_environments.py +6 -0
  50. sempy_labs/_eventhouses.py +6 -0
  51. sempy_labs/_eventstreams.py +6 -0
  52. sempy_labs/_external_data_shares.py +6 -4
  53. sempy_labs/_generate_semantic_model.py +26 -3
  54. sempy_labs/_git.py +14 -14
  55. sempy_labs/_helper_functions.py +197 -1
  56. sempy_labs/_icons.py +55 -22
  57. sempy_labs/_kql_databases.py +6 -0
  58. sempy_labs/_kql_querysets.py +6 -0
  59. sempy_labs/_list_functions.py +1 -1
  60. sempy_labs/_managed_private_endpoints.py +166 -0
  61. sempy_labs/_mirrored_databases.py +428 -0
  62. sempy_labs/_mirrored_warehouses.py +2 -0
  63. sempy_labs/_ml_experiments.py +6 -0
  64. sempy_labs/_ml_models.py +7 -1
  65. sempy_labs/_model_bpa.py +215 -181
  66. sempy_labs/_model_bpa_bulk.py +46 -42
  67. sempy_labs/_model_bpa_rules.py +8 -3
  68. sempy_labs/_model_dependencies.py +41 -87
  69. sempy_labs/_notebooks.py +107 -12
  70. sempy_labs/_query_scale_out.py +8 -6
  71. sempy_labs/_refresh_semantic_model.py +299 -49
  72. sempy_labs/_spark.py +12 -5
  73. sempy_labs/_translations.py +2 -0
  74. sempy_labs/_vertipaq.py +89 -86
  75. sempy_labs/_warehouses.py +79 -0
  76. sempy_labs/_workloads.py +128 -0
  77. sempy_labs/_workspace_identity.py +4 -4
  78. sempy_labs/_workspaces.py +14 -1
  79. sempy_labs/admin/__init__.py +2 -0
  80. sempy_labs/admin/_basic_functions.py +131 -43
  81. sempy_labs/admin/_domains.py +18 -18
  82. sempy_labs/directlake/__init__.py +2 -0
  83. sempy_labs/directlake/_directlake_schema_sync.py +2 -1
  84. sempy_labs/directlake/_dl_helper.py +4 -1
  85. sempy_labs/directlake/_get_shared_expression.py +7 -1
  86. sempy_labs/directlake/_guardrails.py +2 -1
  87. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -7
  88. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
  89. sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
  90. sempy_labs/directlake/_warm_cache.py +10 -9
  91. sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
  92. sempy_labs/lakehouse/_shortcuts.py +4 -0
  93. sempy_labs/migration/_create_pqt_file.py +5 -2
  94. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
  95. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
  96. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
  97. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
  98. sempy_labs/migration/_migration_validation.py +2 -0
  99. sempy_labs/migration/_refresh_calc_tables.py +1 -0
  100. sempy_labs/report/__init__.py +6 -1
  101. sempy_labs/report/_download_report.py +75 -0
  102. sempy_labs/report/_generate_report.py +6 -0
  103. sempy_labs/report/_paginated.py +74 -0
  104. sempy_labs/report/_report_functions.py +6 -0
  105. sempy_labs/report/_report_rebind.py +2 -0
  106. sempy_labs/report/_reportwrapper.py +4 -2
  107. sempy_labs/tom/_model.py +135 -68
  108. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/LICENSE +0 -0
  109. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/top_level.txt +0 -0
@@ -14,6 +14,8 @@ def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
14
14
  """
15
15
  Shows the eventstreams within a workspace.
16
16
 
17
+ This is a wrapper function for the following API: `Items - List Eventstreams <https://learn.microsoft.com/rest/api/fabric/environment/items/list-eventstreams>`_.
18
+
17
19
  Parameters
18
20
  ----------
19
21
  workspace : str, default=None
@@ -56,6 +58,8 @@ def create_eventstream(
56
58
  """
57
59
  Creates a Fabric eventstream.
58
60
 
61
+ This is a wrapper function for the following API: `Items - Create Eventstream <https://learn.microsoft.com/rest/api/fabric/environment/items/create-eventstream>`_.
62
+
59
63
  Parameters
60
64
  ----------
61
65
  name: str
@@ -91,6 +95,8 @@ def delete_eventstream(name: str, workspace: Optional[str] = None):
91
95
  """
92
96
  Deletes a Fabric eventstream.
93
97
 
98
+ This is a wrapper function for the following API: `Items - Delete Eventstream <https://learn.microsoft.com/rest/api/fabric/environment/items/delete-eventstream>`_.
99
+
94
100
  Parameters
95
101
  ----------
96
102
  name: str
@@ -20,6 +20,8 @@ def create_external_data_share(
20
20
  """
21
21
  Creates an external data share for a given path or list of paths in the specified item.
22
22
 
23
+ This is a wrapper function for the following API: `External Data Shares - Create External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/create-external-data-share>`_.
24
+
23
25
  Parameters
24
26
  ----------
25
27
  item_name : str
@@ -71,6 +73,8 @@ def revoke_external_data_share(
71
73
  """
72
74
  Revokes the specified external data share. Note: This action cannot be undone.
73
75
 
76
+ This is a wrapper function for the following API: `External Data Shares - Revoke External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/revoke-external-data-share>`_.
77
+
74
78
  Parameters
75
79
  ----------
76
80
  external_data_share_id : UUID
@@ -85,8 +89,6 @@ def revoke_external_data_share(
85
89
  or if no lakehouse attached, resolves to the workspace of the notebook.
86
90
  """
87
91
 
88
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/external-data-shares/revoke-external-data-share?tabs=HTTP
89
-
90
92
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
91
93
  item_id = fabric.resolve_item_id(
92
94
  item_name=item_name, type=item_type, workspace=workspace
@@ -111,6 +113,8 @@ def list_external_data_shares_in_item(
111
113
  """
112
114
  Returns a list of the external data shares that exist for the specified item.
113
115
 
116
+ This is a wrapper function for the following API: `External Data Shares - List External Data Shares In Item <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/list-external-data-shares-in-item>`_.
117
+
114
118
  Parameters
115
119
  ----------
116
120
  item_name : str
@@ -128,8 +132,6 @@ def list_external_data_shares_in_item(
128
132
  A pandas dataframe showing a list of the external data shares that exist for the specified item.
129
133
  """
130
134
 
131
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/external-data-shares/list-external-data-shares-in-item?tabs=HTTP
132
-
133
135
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
134
136
  item_id = fabric.resolve_item_id(
135
137
  item_name=item_name, type=item_type, workspace=workspace
@@ -64,11 +64,28 @@ def create_blank_semantic_model(
64
64
  "name": '{dataset}',
65
65
  "compatibilityLevel": {compatibility_level},
66
66
  "model": {{
67
- "culture": "en-US",
68
- "defaultPowerBIDataSourceVersion": "powerBI_V3"
67
+ "cultures": [
68
+ {{
69
+ "name": "en-US",
70
+ "linguisticMetadata": {{
71
+ "content": {{
72
+ "Version": "1.0.0",
73
+ "Language": "en-US"
74
+ }},
75
+ "contentType": "json"
76
+ }}
77
+ }}
78
+ ],
79
+ "collation": "Latin1_General_100_BIN2_UTF8",
80
+ "dataAccessOptions": {{
81
+ "legacyRedirects": true,
82
+ "returnErrorValuesAsNull": true,
83
+ }},
84
+ "defaultPowerBIDataSourceVersion": "powerBI_V3",
85
+ "sourceQueryCulture": "en-US",
86
+ }}
69
87
  }}
70
88
  }}
71
- }}
72
89
  }}
73
90
  """
74
91
 
@@ -85,6 +102,8 @@ def create_semantic_model_from_bim(
85
102
  """
86
103
  Creates a new semantic model based on a Model.bim file.
87
104
 
105
+ This is a wrapper function for the following API: `Items - Create Semantic Model <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/create-semantic-model>`_.
106
+
88
107
  Parameters
89
108
  ----------
90
109
  dataset : str
@@ -149,6 +168,8 @@ def update_semantic_model_from_bim(
149
168
  """
150
169
  Updates a semantic model definition based on a Model.bim file.
151
170
 
171
+ This is a wrapper function for the following API: `Items - Update Semantic Model Definition <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/update-semantic-model-definition>`_.
172
+
152
173
  Parameters
153
174
  ----------
154
175
  dataset : str
@@ -287,6 +308,8 @@ def get_semantic_model_bim(
287
308
  """
288
309
  Extracts the Model.bim file for a given semantic model.
289
310
 
311
+ This is a wrapper function for the following API: `Items - Get Semantic Model Definition <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/get-semantic-model-definition>`_.
312
+
290
313
  Parameters
291
314
  ----------
292
315
  dataset : str
sempy_labs/_git.py CHANGED
@@ -21,6 +21,8 @@ def connect_workspace_to_git(
21
21
  """
22
22
  Connects a workspace to a git repository.
23
23
 
24
+ This is a wrapper function for the following API: `Git - Connect <https://learn.microsoft.com/rest/api/fabric/core/git/connect>`_.
25
+
24
26
  Parameters
25
27
  ----------
26
28
  organization_name : str
@@ -41,8 +43,6 @@ def connect_workspace_to_git(
41
43
  or if no lakehouse attached, resolves to the workspace of the notebook.
42
44
  """
43
45
 
44
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/git/connect?tabs=HTTP
45
-
46
46
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)
47
47
 
48
48
  request_body = {
@@ -72,6 +72,8 @@ def disconnect_workspace_from_git(workspace: Optional[str] = None):
72
72
  """
73
73
  Disconnects a workspace from a git repository.
74
74
 
75
+ This is a wrapper function for the following API: `Git - Disconnect <https://learn.microsoft.com/rest/api/fabric/core/git/disconnect>`_.
76
+
75
77
  Parameters
76
78
  ----------
77
79
  workspace : str, default=None
@@ -80,8 +82,6 @@ def disconnect_workspace_from_git(workspace: Optional[str] = None):
80
82
  or if no lakehouse attached, resolves to the workspace of the notebook.
81
83
  """
82
84
 
83
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/git/disconnect?tabs=HTTP
84
-
85
85
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)
86
86
 
87
87
  client = fabric.FabricRestClient()
@@ -98,6 +98,8 @@ def get_git_status(workspace: Optional[str] = None) -> pd.DataFrame:
98
98
  """
99
99
  Obtains the Git status of items in the workspace that can be committed to Git.
100
100
 
101
+ This is a wrapper function for the following API: `Git - Get Status <https://learn.microsoft.com/rest/api/fabric/core/git/get-status>`_.
102
+
101
103
  Parameters
102
104
  ----------
103
105
  workspace : str, default=None
@@ -111,8 +113,6 @@ def get_git_status(workspace: Optional[str] = None) -> pd.DataFrame:
111
113
  A pandas dataframe showing the Git status of items in the workspace.
112
114
  """
113
115
 
114
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/git/get-status?tabs=HTTP
115
-
116
116
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)
117
117
 
118
118
  df = pd.DataFrame(
@@ -161,6 +161,8 @@ def get_git_connection(workspace: Optional[str] = None) -> pd.DataFrame:
161
161
  """
162
162
  Obtains the Git connection details of the workspace.
163
163
 
164
+ This is a wrapper function for the following API: `Git - Get Connection <https://learn.microsoft.com/rest/api/fabric/core/git/get-connection>`_.
165
+
164
166
  Parameters
165
167
  ----------
166
168
  workspace : str, default=None
@@ -174,8 +176,6 @@ def get_git_connection(workspace: Optional[str] = None) -> pd.DataFrame:
174
176
  A pandas dataframe showing the Git connection details of the workspace.
175
177
  """
176
178
 
177
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/git/get-status?tabs=HTTP
178
-
179
179
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)
180
180
 
181
181
  df = pd.DataFrame(
@@ -221,6 +221,8 @@ def initialize_git_connection(workspace: Optional[str] = None):
221
221
  """
222
222
  Initializes a connection for a workspace that is connected to Git.
223
223
 
224
+ This is a wrapper function for the following API: `Git - Initialize Connection <https://learn.microsoft.com/rest/api/fabric/core/git/initialize-connection>`_.
225
+
224
226
  Parameters
225
227
  ----------
226
228
  workspace : str, default=None
@@ -229,8 +231,6 @@ def initialize_git_connection(workspace: Optional[str] = None):
229
231
  or if no lakehouse attached, resolves to the workspace of the notebook.
230
232
  """
231
233
 
232
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/git/initialize-connection?tabs=HTTP
233
-
234
234
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)
235
235
 
236
236
  client = fabric.FabricRestClient()
@@ -252,6 +252,8 @@ def commit_to_git(
252
252
  """
253
253
  Commits all or a selection of items within a workspace to Git.
254
254
 
255
+ This is a wrapper function for the following API: `Git - Commit to Git <https://learn.microsoft.com/rest/api/fabric/core/git/commit-to-git>`_.
256
+
255
257
  Parameters
256
258
  ----------
257
259
  comment : str
@@ -265,8 +267,6 @@ def commit_to_git(
265
267
  or if no lakehouse attached, resolves to the workspace of the notebook.
266
268
  """
267
269
 
268
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/git/commit-to-git?tabs=HTTP
269
-
270
270
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)
271
271
 
272
272
  gs = get_git_status(workspace=workspace)
@@ -320,6 +320,8 @@ def update_from_git(
320
320
  """
321
321
  Updates the workspace with commits pushed to the connected branch.
322
322
 
323
+ This is a wrapper function for the following API: `Git - Update From Git <https://learn.microsoft.com/rest/api/fabric/core/git/update-from-git>`_.
324
+
323
325
  Parameters
324
326
  ----------
325
327
  workspace_head : str
@@ -336,8 +338,6 @@ def update_from_git(
336
338
  or if no lakehouse attached, resolves to the workspace of the notebook.
337
339
  """
338
340
 
339
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/git/update-from-git?tabs=HTTP
340
-
341
341
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)
342
342
 
343
343
  conflict_resolution_policies = ["PreferWorkspace", "PreferRemote"]
@@ -13,6 +13,8 @@ from uuid import UUID
13
13
  import sempy_labs._icons as icons
14
14
  import urllib.parse
15
15
  from azure.core.credentials import TokenCredential, AccessToken
16
+ import numpy as np
17
+ from IPython.display import display, HTML
16
18
 
17
19
 
18
20
  def create_abfss_path(
@@ -1142,6 +1144,200 @@ def _get_max_run_id(lakehouse: str, table_name: str) -> int:
1142
1144
  spark = SparkSession.builder.getOrCreate()
1143
1145
  query = f"SELECT MAX(RunId) FROM {lakehouse}.{table_name}"
1144
1146
  dfSpark = spark.sql(query)
1145
- max_run_id = dfSpark.collect()[0][0]
1147
+ max_run_id = dfSpark.collect()[0][0] or 0
1146
1148
 
1147
1149
  return max_run_id
1150
+
1151
+
1152
+ def _make_list_unique(my_list):
1153
+
1154
+ return list(set(my_list))
1155
+
1156
+
1157
+ def _get_partition_map(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
1158
+
1159
+ if workspace is None:
1160
+ workspace = fabric.resolve_workspace_name()
1161
+
1162
+ partitions = fabric.evaluate_dax(
1163
+ dataset=dataset,
1164
+ workspace=workspace,
1165
+ dax_string="""
1166
+ select [ID] AS [PartitionID], [TableID], [Name] AS [PartitionName] from $system.tmschema_partitions
1167
+ """,
1168
+ )
1169
+
1170
+ tables = fabric.evaluate_dax(
1171
+ dataset=dataset,
1172
+ workspace=workspace,
1173
+ dax_string="""
1174
+ select [ID] AS [TableID], [Name] AS [TableName] from $system.tmschema_tables
1175
+ """,
1176
+ )
1177
+
1178
+ partition_map = pd.merge(partitions, tables, on="TableID", how="left")
1179
+ partition_map["PartitionID"] = partition_map["PartitionID"].astype(str)
1180
+ partition_counts = partition_map.groupby("TableID")["PartitionID"].transform(
1181
+ "count"
1182
+ )
1183
+ partition_map["Object Name"] = partition_map.apply(
1184
+ lambda row: (
1185
+ f"'{row['TableName']}'[{row['PartitionName']}]"
1186
+ if partition_counts[row.name] > 1
1187
+ else row["TableName"]
1188
+ ),
1189
+ axis=1,
1190
+ )
1191
+ return partition_map
1192
+
1193
+
1194
+ def _show_chart(spec, title):
1195
+
1196
+ h = f"""
1197
+ <!DOCTYPE html>
1198
+ <html>
1199
+ <head>
1200
+ <script src="https://cdn.jsdelivr.net/npm/vega@5"></script>
1201
+ <script src="https://cdn.jsdelivr.net/npm/vega-lite@5"></script>
1202
+ <script src="https://cdn.jsdelivr.net/npm/vega-embed@6"></script>
1203
+ <style>
1204
+ table, th, td {{
1205
+ border: 10px solid #e7e9eb;
1206
+ border-collapse: collapse;
1207
+ }}
1208
+ </style>
1209
+ </head>
1210
+ <body>
1211
+ <table>
1212
+ <tr>
1213
+ <td style="text-align: center;">
1214
+ <h1>{title}</h1>
1215
+ </td>
1216
+ </tr>
1217
+ <tr>
1218
+ <td>
1219
+ <div id="vis"></div>
1220
+ </td>
1221
+ </tr>
1222
+ </table>
1223
+ <script type="text/javascript">
1224
+ var spec = {spec};
1225
+ var opt = {{"renderer": "canvas", "actions": false}};
1226
+ vegaEmbed("#vis", spec, opt);
1227
+ </script>
1228
+ </body>
1229
+ </html>"""
1230
+
1231
+ display(HTML(h))
1232
+
1233
+
1234
+ def _process_and_display_chart(df, title, widget):
1235
+
1236
+ # Convert time columns to milliseconds
1237
+ df["Start"] = df["Start Time"].astype(np.int64) / int(1e6)
1238
+ df["End"] = df["End Time"].astype(np.int64) / int(1e6)
1239
+
1240
+ # Calculate the time offset for proper Gantt chart rendering
1241
+ Offset = min(df["Start"])
1242
+ df["Start"] = df["Start"] - Offset
1243
+ df["End"] = df["End"] - Offset
1244
+
1245
+ # Vega-Lite spec for Gantt chart
1246
+ spec = (
1247
+ """{
1248
+ "$schema": "https://vega.github.io/schema/vega-lite/v5.json",
1249
+ "description": "A simple bar chart with ranged data (aka Gantt Chart).",
1250
+ "data": { "values": """
1251
+ + df.to_json(orient="records")
1252
+ + """ },
1253
+ "width": 700,
1254
+ "height": 400,
1255
+ "mark": "bar",
1256
+ "encoding": {
1257
+ "y": {
1258
+ "field": "Object Name",
1259
+ "type": "ordinal",
1260
+ "axis": {
1261
+ "labelFontSize": 15,
1262
+ "titleFontSize": 20,
1263
+ "title": "Object"
1264
+ }
1265
+ },
1266
+ "x": {
1267
+ "field": "Start",
1268
+ "type": "quantitative",
1269
+ "title": "milliseconds",
1270
+ "axis": {
1271
+ "titleFontSize": 20
1272
+ }
1273
+ },
1274
+ "x2": {"field": "End"},
1275
+ "color": {
1276
+ "field": "Event Subclass",
1277
+ "scale": {
1278
+ "domain": ["Process", "ExecuteSql"],
1279
+ "range": ["#FFC000","#0070C0"]
1280
+ },
1281
+ "legend": {
1282
+ "labelFontSize": 20,
1283
+ "titleFontSize": 20,
1284
+ "title": "Event Type"
1285
+ }
1286
+ },
1287
+ "tooltip": [
1288
+ {"field": "Duration", "type": "quantitative", "format": ","},
1289
+ {"field": "Cpu Time", "type": "quantitative", "format": ","},
1290
+ {"field": "Event Subclass", "type": "nominal"}
1291
+ ]
1292
+ }
1293
+ }"""
1294
+ )
1295
+
1296
+ with widget:
1297
+ widget.clear_output(wait=True)
1298
+ _show_chart(spec, title=title)
1299
+
1300
+
1301
+ def _convert_data_type(input_data_type: str) -> str:
1302
+
1303
+ data_type_mapping = {
1304
+ "string": "String",
1305
+ "int": "Int64",
1306
+ "tinyint": "Int64",
1307
+ "smallint": "Int64",
1308
+ "bigint": "Int64",
1309
+ "boolean": "Boolean",
1310
+ "timestamp": "DateTime",
1311
+ "date": "DateTime",
1312
+ "double": "Double",
1313
+ "float": "Double",
1314
+ }
1315
+
1316
+ if "decimal" in input_data_type:
1317
+ return "Decimal"
1318
+ else:
1319
+ return data_type_mapping.get(input_data_type)
1320
+
1321
+
1322
+ def _is_valid_uuid(
1323
+ guid: str,
1324
+ ):
1325
+ """
1326
+ Validates if a string is a valid GUID in version 4
1327
+
1328
+ Parameters
1329
+ ----------
1330
+ guid : str
1331
+ GUID to be validated.
1332
+
1333
+ Returns
1334
+ -------
1335
+ bool
1336
+ Boolean that indicates if the string is a GUID or not.
1337
+ """
1338
+
1339
+ try:
1340
+ UUID(str(guid), version=4)
1341
+ return True
1342
+ except ValueError:
1343
+ return False
sempy_labs/_icons.py CHANGED
@@ -10,20 +10,6 @@ bullet = "\u2022"
10
10
  warning = "⚠️"
11
11
  error = "\u274C"
12
12
  info = "ℹ️"
13
- data_type_mapping = {
14
- "string": "String",
15
- "int": "Int64",
16
- "tinyint": "Int64",
17
- "smallint": "Int64",
18
- "bigint": "Int64",
19
- "boolean": "Boolean",
20
- "timestamp": "DateTime",
21
- "date": "DateTime",
22
- "decimal(38,18)": "Decimal",
23
- "decimal(19,4)": "Decimal",
24
- "double": "Double",
25
- "float": "Double",
26
- }
27
13
  measure_icon = "\u2211"
28
14
  table_icon = "\u229E"
29
15
  column_icon = "\u229F"
@@ -93,14 +79,15 @@ sku_mapping = {
93
79
  "P4": "F512",
94
80
  "P5": "F1024",
95
81
  }
96
- refreshTypes = [
97
- "full",
98
- "automatic",
99
- "dataOnly",
100
- "calculate",
101
- "clearValues",
102
- "defragment",
103
- ]
82
+
83
+ refresh_type_mapping = {
84
+ "full": "full",
85
+ "auto": "automatic",
86
+ "data": "dataOnly",
87
+ "calc": "calculate",
88
+ "clear": "clearValues",
89
+ "defrag": "defragment",
90
+ }
104
91
 
105
92
  itemTypes = {
106
93
  "DataPipeline": "dataPipelines",
@@ -114,3 +101,49 @@ itemTypes = {
114
101
  "Warehouse": "warehouses",
115
102
  }
116
103
  default_schema = "dbo"
104
+
105
+ data_type_string = "string"
106
+ data_type_long = "long"
107
+ data_type_timestamp = "timestamp"
108
+ data_type_double = "double"
109
+ data_type_bool = "bool"
110
+ int_format = "int"
111
+ pct_format = "pct"
112
+ no_format = ""
113
+
114
+ bpa_schema = {
115
+ "Capacity Name": data_type_string,
116
+ "Capacity Id": data_type_string,
117
+ "Workspace Name": data_type_string,
118
+ "Workspace Id": data_type_string,
119
+ "Dataset Name": data_type_string,
120
+ "Dataset Id": data_type_string,
121
+ "Configured By": data_type_string,
122
+ "Rule Name": data_type_string,
123
+ "Category": data_type_string,
124
+ "Severity": data_type_string,
125
+ "Object Type": data_type_string,
126
+ "Object Name": data_type_string,
127
+ "Description": data_type_string,
128
+ "URL": data_type_string,
129
+ "RunId": data_type_long,
130
+ "Timestamp": data_type_timestamp,
131
+ }
132
+
133
+ sll_ann_name = "PBI_ProTooling"
134
+ sll_prefix = "SLL_"
135
+ sll_tags = []
136
+ base_cols = ["EventClass", "EventSubclass", "CurrentTime", "TextData"]
137
+ end_cols = base_cols + [
138
+ "StartTime",
139
+ "EndTime",
140
+ "Duration",
141
+ "CpuTime",
142
+ "Success",
143
+ "IntegerData",
144
+ "ObjectID",
145
+ ]
146
+ refresh_event_schema = {
147
+ "JobGraph": base_cols,
148
+ "ProgressReportEnd": end_cols,
149
+ }
@@ -14,6 +14,8 @@ def list_kql_databases(workspace: Optional[str] = None) -> pd.DataFrame:
14
14
  """
15
15
  Shows the KQL databases within a workspace.
16
16
 
17
+ This is a wrapper function for the following API: `Items - List KQL Databases <https://learn.microsoft.com/rest/api/fabric/kqldatabase/items/list-kql-databases>`_.
18
+
17
19
  Parameters
18
20
  ----------
19
21
  workspace : str, default=None
@@ -72,6 +74,8 @@ def create_kql_database(
72
74
  """
73
75
  Creates a KQL database.
74
76
 
77
+ This is a wrapper function for the following API: `Items - Create KQL Database <https://learn.microsoft.com/rest/api/fabric/kqldatabase/items/create-kql-database>`_.
78
+
75
79
  Parameters
76
80
  ----------
77
81
  name: str
@@ -107,6 +111,8 @@ def delete_kql_database(name: str, workspace: Optional[str] = None):
107
111
  """
108
112
  Deletes a KQL database.
109
113
 
114
+ This is a wrapper function for the following API: `Items - Delete KQL Database <https://learn.microsoft.com/rest/api/fabric/kqldatabase/items/delete-kql-database>`_.
115
+
110
116
  Parameters
111
117
  ----------
112
118
  name: str
@@ -14,6 +14,8 @@ def list_kql_querysets(workspace: Optional[str] = None) -> pd.DataFrame:
14
14
  """
15
15
  Shows the KQL querysets within a workspace.
16
16
 
17
+ This is a wrapper function for the following API: `Items - List KQL Querysets <https://learn.microsoft.com/rest/api/fabric/kqlqueryset/items/list-kql-querysets>`_.
18
+
17
19
  Parameters
18
20
  ----------
19
21
  workspace : str, default=None
@@ -62,6 +64,8 @@ def create_kql_queryset(
62
64
  """
63
65
  Creates a KQL queryset.
64
66
 
67
+ This is a wrapper function for the following API: `Items - Create KQL Queryset <https://learn.microsoft.com/rest/api/fabric/kqlqueryset/items/create-kql-queryset>`_.
68
+
65
69
  Parameters
66
70
  ----------
67
71
  name: str
@@ -97,6 +101,8 @@ def delete_kql_queryset(name: str, workspace: Optional[str] = None):
97
101
  """
98
102
  Deletes a KQL queryset.
99
103
 
104
+ This is a wrapper function for the following API: `Items - Delete KQL Queryset <https://learn.microsoft.com/rest/api/fabric/kqlqueryset/items/delete-kql-queryset>`_.
105
+
100
106
  Parameters
101
107
  ----------
102
108
  name: str
@@ -1515,7 +1515,7 @@ def list_semantic_model_object_report_usage(
1515
1515
  "Object Type": object_type,
1516
1516
  }
1517
1517
  df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
1518
- df["Referenced Object Type"] = df["Referenced Object Type"].replace(
1518
+ df["Object Type"] = df["Object Type"].replace(
1519
1519
  "Attribute Hierarchy", "Column"
1520
1520
  )
1521
1521
  if object_type in ["Measure", "Calc Column", "Calc Table", "Hierarchy"]: