semantic-link-labs 0.7.2__py3-none-any.whl → 0.7.4__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of semantic-link-labs was flagged as possibly problematic.
Files changed (82)
  1. {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/METADATA +15 -3
  2. semantic_link_labs-0.7.4.dist-info/RECORD +134 -0
  3. {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +120 -24
  5. sempy_labs/_bpa_translation/{_translations_am-ET.po → _model/_translations_am-ET.po} +22 -0
  6. sempy_labs/_bpa_translation/{_translations_ar-AE.po → _model/_translations_ar-AE.po} +24 -0
  7. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +938 -0
  8. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +934 -0
  9. sempy_labs/_bpa_translation/{_translations_cs-CZ.po → _model/_translations_cs-CZ.po} +179 -157
  10. sempy_labs/_bpa_translation/{_translations_da-DK.po → _model/_translations_da-DK.po} +24 -0
  11. sempy_labs/_bpa_translation/{_translations_de-DE.po → _model/_translations_de-DE.po} +77 -52
  12. sempy_labs/_bpa_translation/{_translations_el-GR.po → _model/_translations_el-GR.po} +25 -0
  13. sempy_labs/_bpa_translation/{_translations_es-ES.po → _model/_translations_es-ES.po} +67 -43
  14. sempy_labs/_bpa_translation/{_translations_fa-IR.po → _model/_translations_fa-IR.po} +24 -0
  15. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +915 -0
  16. sempy_labs/_bpa_translation/{_translations_fr-FR.po → _model/_translations_fr-FR.po} +83 -57
  17. sempy_labs/_bpa_translation/{_translations_ga-IE.po → _model/_translations_ga-IE.po} +25 -0
  18. sempy_labs/_bpa_translation/{_translations_he-IL.po → _model/_translations_he-IL.po} +23 -0
  19. sempy_labs/_bpa_translation/{_translations_hi-IN.po → _model/_translations_hi-IN.po} +24 -0
  20. sempy_labs/_bpa_translation/{_translations_hu-HU.po → _model/_translations_hu-HU.po} +25 -0
  21. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +918 -0
  22. sempy_labs/_bpa_translation/{_translations_is-IS.po → _model/_translations_is-IS.po} +25 -0
  23. sempy_labs/_bpa_translation/{_translations_it-IT.po → _model/_translations_it-IT.po} +25 -0
  24. sempy_labs/_bpa_translation/{_translations_ja-JP.po → _model/_translations_ja-JP.po} +21 -0
  25. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +823 -0
  26. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +937 -0
  27. sempy_labs/_bpa_translation/{_translations_nl-NL.po → _model/_translations_nl-NL.po} +80 -56
  28. sempy_labs/_bpa_translation/{_translations_pl-PL.po → _model/_translations_pl-PL.po} +101 -76
  29. sempy_labs/_bpa_translation/{_translations_pt-BR.po → _model/_translations_pt-BR.po} +25 -0
  30. sempy_labs/_bpa_translation/{_translations_pt-PT.po → _model/_translations_pt-PT.po} +25 -0
  31. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +939 -0
  32. sempy_labs/_bpa_translation/{_translations_ru-RU.po → _model/_translations_ru-RU.po} +25 -0
  33. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +925 -0
  34. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +922 -0
  35. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +914 -0
  36. sempy_labs/_bpa_translation/{_translations_ta-IN.po → _model/_translations_ta-IN.po} +26 -0
  37. sempy_labs/_bpa_translation/{_translations_te-IN.po → _model/_translations_te-IN.po} +24 -0
  38. sempy_labs/_bpa_translation/{_translations_th-TH.po → _model/_translations_th-TH.po} +24 -0
  39. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +925 -0
  40. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +933 -0
  41. sempy_labs/_bpa_translation/{_translations_zh-CN.po → _model/_translations_zh-CN.po} +116 -97
  42. sempy_labs/_bpa_translation/{_translations_zu-ZA.po → _model/_translations_zu-ZA.po} +25 -0
  43. sempy_labs/_capacities.py +541 -0
  44. sempy_labs/_clear_cache.py +298 -3
  45. sempy_labs/_connections.py +138 -0
  46. sempy_labs/_dataflows.py +130 -0
  47. sempy_labs/_deployment_pipelines.py +171 -0
  48. sempy_labs/_environments.py +156 -0
  49. sempy_labs/_generate_semantic_model.py +148 -27
  50. sempy_labs/_git.py +380 -0
  51. sempy_labs/_helper_functions.py +203 -8
  52. sempy_labs/_icons.py +43 -0
  53. sempy_labs/_list_functions.py +170 -1012
  54. sempy_labs/_model_bpa.py +90 -112
  55. sempy_labs/_model_bpa_bulk.py +3 -1
  56. sempy_labs/_model_bpa_rules.py +788 -800
  57. sempy_labs/_notebooks.py +143 -0
  58. sempy_labs/_query_scale_out.py +28 -7
  59. sempy_labs/_spark.py +465 -0
  60. sempy_labs/_sql.py +120 -0
  61. sempy_labs/_translations.py +3 -1
  62. sempy_labs/_vertipaq.py +160 -99
  63. sempy_labs/_workspace_identity.py +66 -0
  64. sempy_labs/_workspaces.py +294 -0
  65. sempy_labs/directlake/__init__.py +2 -0
  66. sempy_labs/directlake/_directlake_schema_compare.py +1 -2
  67. sempy_labs/directlake/_directlake_schema_sync.py +1 -2
  68. sempy_labs/directlake/_dl_helper.py +4 -7
  69. sempy_labs/directlake/_generate_shared_expression.py +85 -0
  70. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -2
  71. sempy_labs/lakehouse/_get_lakehouse_tables.py +7 -3
  72. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +5 -0
  73. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +5 -0
  74. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +6 -2
  75. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +6 -5
  76. sempy_labs/migration/_migration_validation.py +6 -0
  77. sempy_labs/report/_report_functions.py +21 -42
  78. sempy_labs/report/_report_rebind.py +5 -0
  79. sempy_labs/tom/_model.py +95 -52
  80. semantic_link_labs-0.7.2.dist-info/RECORD +0 -111
  81. {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/LICENSE +0 -0
  82. {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/top_level.txt +0 -0
sempy_labs/_workspaces.py
@@ -0,0 +1,294 @@
+ import sempy.fabric as fabric
+ import pandas as pd
+ import sempy_labs._icons as icons
+ from typing import Optional
+ from sempy_labs._helper_functions import (
+     resolve_workspace_name_and_id,
+     pagination,
+     resolve_capacity_id,
+ )
+ from sempy.fabric.exceptions import FabricHTTPException
+
+
+ def delete_user_from_workspace(email_address: str, workspace: Optional[str] = None):
+     """
+     Removes a user from a workspace.
+
+     Parameters
+     ----------
+     email_address : str
+         The email address of the user.
+     workspace : str, default=None
+         The name of the workspace.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     """
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     client = fabric.PowerBIRestClient()
+     response = client.delete(f"/v1.0/myorg/groups/{workspace_id}/users/{email_address}")
+
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+     print(
+         f"{icons.green_dot} The '{email_address}' user has been removed from accessing the '{workspace}' workspace."
+     )
+
+
+ def update_workspace_user(
+     email_address: str,
+     role_name: str,
+     principal_type: Optional[str] = "User",
+     workspace: Optional[str] = None,
+ ):
+     """
+     Updates a user's role within a workspace.
+
+     Parameters
+     ----------
+     email_address : str
+         The email address of the user.
+     role_name : str
+         The `role <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#groupuseraccessright>`_ of the user within the workspace.
+     principal_type : str, default='User'
+         The `principal type <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#principaltype>`_.
+     workspace : str, default=None
+         The name of the workspace.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     role_names = icons.workspace_roles
+     role_name = role_name.capitalize()
+     if role_name not in role_names:
+         raise ValueError(
+             f"{icons.red_dot} Invalid role. The 'role_name' parameter must be one of the following: {role_names}."
+         )
+     principal_types = icons.principal_types
+     principal_type = principal_type.capitalize()
+     if principal_type not in principal_types:
+         raise ValueError(
+             f"{icons.red_dot} Invalid princpal type. Valid options: {principal_types}."
+         )
+
+     request_body = {
+         "emailAddress": email_address,
+         "groupUserAccessRight": role_name,
+         "principalType": principal_type,
+         "identifier": email_address,
+     }
+
+     client = fabric.PowerBIRestClient()
+     response = client.put(f"/v1.0/myorg/groups/{workspace_id}/users", json=request_body)
+
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+     print(
+         f"{icons.green_dot} The '{email_address}' user has been updated to a '{role_name}' within the '{workspace}' workspace."
+     )
+
+
+ def list_workspace_users(workspace: Optional[str] = None) -> pd.DataFrame:
+     """
+     A list of all the users of a workspace and their roles.
+
+     Parameters
+     ----------
+     workspace : str, default=None
+         The name of the workspace.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe the users of a workspace and their properties.
+     """
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     df = pd.DataFrame(columns=["User Name", "Email Address", "Role", "Type", "User ID"])
+     client = fabric.FabricRestClient()
+     response = client.get(f"/v1/workspaces/{workspace_id}/roleAssignments")
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     responses = pagination(client, response)
+
+     for r in responses:
+         for v in r.get("value", []):
+             p = v.get("principal", {})
+             new_data = {
+                 "User Name": p.get("displayName"),
+                 "User ID": p.get("id"),
+                 "Type": p.get("type"),
+                 "Role": v.get("role"),
+                 "Email Address": p.get("userDetails", {}).get("userPrincipalName"),
+             }
+             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     return df
+
+
+ def add_user_to_workspace(
+     email_address: str,
+     role_name: str,
+     principal_type: Optional[str] = "User",
+     workspace: Optional[str] = None,
+ ):
+     """
+     Adds a user to a workspace.
+
+     Parameters
+     ----------
+     email_address : str
+         The email address of the user.
+     role_name : str
+         The `role <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#groupuseraccessright>`_ of the user within the workspace.
+     principal_type : str, default='User'
+         The `principal type <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#principaltype>`_.
+     workspace : str, default=None
+         The name of the workspace.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     role_names = icons.workspace_roles
+     role_name = role_name.capitalize()
+     if role_name not in role_names:
+         raise ValueError(
+             f"{icons.red_dot} Invalid role. The 'role_name' parameter must be one of the following: {role_names}."
+         )
+     plural = "n" if role_name == "Admin" else ""
+     principal_types = icons.principal_types
+     principal_type = principal_type.capitalize()
+     if principal_type not in principal_types:
+         raise ValueError(
+             f"{icons.red_dot} Invalid princpal type. Valid options: {principal_types}."
+         )
+
+     client = fabric.PowerBIRestClient()
+
+     request_body = {
+         "emailAddress": email_address,
+         "groupUserAccessRight": role_name,
+         "principalType": principal_type,
+         "identifier": email_address,
+     }
+
+     response = client.post(
+         f"/v1.0/myorg/groups/{workspace_id}/users", json=request_body
+     )
+
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+     print(
+         f"{icons.green_dot} The '{email_address}' user has been added as a{plural} '{role_name}' within the '{workspace}' workspace."
+     )
+
+
+ def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] = None):
+     """
+     Assigns a workspace to a capacity.
+
+     Parameters
+     ----------
+     capacity_name : str
+         The name of the capacity.
+     workspace : str, default=None
+         The name of the Fabric workspace.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+     capacity_id = resolve_capacity_id(capacity_name=capacity_name)
+
+     request_body = {"capacityId": capacity_id}
+
+     client = fabric.FabricRestClient()
+     response = client.post(
+         f"/v1/workspaces/{workspace_id}/assignToCapacity",
+         json=request_body,
+     )
+
+     if response.status_code not in [200, 202]:
+         raise FabricHTTPException(response)
+     print(
+         f"{icons.green_dot} The '{workspace}' workspace has been assigned to the '{capacity_name}' capacity."
+     )
+
+
+ def unassign_workspace_from_capacity(workspace: Optional[str] = None):
+     """
+     Unassigns a workspace from its assigned capacity.
+
+     Parameters
+     ----------
+     workspace : str, default=None
+         The name of the Fabric workspace.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     # https://learn.microsoft.com/en-us/rest/api/fabric/core/workspaces/unassign-from-capacity?tabs=HTTP
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     client = fabric.FabricRestClient()
+     response = client.post(f"/v1/workspaces/{workspace_id}/unassignFromCapacity")
+
+     if response.status_code not in [200, 202]:
+         raise FabricHTTPException(response)
+     print(
+         f"{icons.green_dot} The '{workspace}' workspace has been unassigned from its capacity."
+     )
+
+
+ def list_workspace_role_assignments(workspace: Optional[str] = None) -> pd.DataFrame:
+     """
+     Shows the members of a given workspace.
+
+     Parameters
+     ----------
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the members of a given workspace and their roles.
+     """
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     df = pd.DataFrame(columns=["User Name", "User Email", "Role Name", "Type"])
+
+     client = fabric.FabricRestClient()
+     response = client.get(f"/v1/workspaces/{workspace_id}/roleAssignments")
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     responses = pagination(client, response)
+
+     for r in responses:
+         for i in r.get("value", []):
+             principal = i.get("principal", {})
+             new_data = {
+                 "User Name": principal.get("displayName"),
+                 "Role Name": i.get("role"),
+                 "Type": principal.get("type"),
+                 "User Email": principal.get("userDetails", {}).get("userPrincipalName"),
+             }
+             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     return df
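
For orientation, here is a minimal usage sketch of the new workspace-management helpers above. It assumes a Fabric notebook session and that these functions are re-exported from the package root (the expanded sempy_labs/__init__.py in this release suggests they are); the email, role, workspace, and capacity values are placeholders.

import sempy_labs as labs

# Grant a user access, inspect the membership, then move the workspace to a capacity.
labs.add_user_to_workspace(
    email_address="user@contoso.com",
    role_name="Member",
    workspace="My Workspace",
)
df = labs.list_workspace_users(workspace="My Workspace")
labs.assign_workspace_to_capacity(
    capacity_name="MyCapacity",
    workspace="My Workspace",
)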
sempy_labs/directlake/__init__.py
@@ -1,3 +1,4 @@
+ from sempy_labs.directlake._generate_shared_expression import generate_shared_expression
  from sempy_labs.directlake._directlake_schema_compare import direct_lake_schema_compare
  from sempy_labs.directlake._directlake_schema_sync import direct_lake_schema_sync
  from sempy_labs.directlake._dl_helper import (
@@ -31,6 +32,7 @@ from sempy_labs.directlake._warm_cache import (
  )

  __all__ = [
+     "generate_shared_expression",
      "direct_lake_schema_compare",
      "direct_lake_schema_sync",
      "check_fallback_reason",
sempy_labs/directlake/_directlake_schema_compare.py
@@ -6,7 +6,6 @@ from sempy_labs._helper_functions import (
  from IPython.display import display
  from sempy_labs.lakehouse import get_lakehouse_columns
  from sempy_labs.directlake._dl_helper import get_direct_lake_source
- from sempy_labs._list_functions import list_tables
  from typing import Optional
  import sempy_labs._icons as icons
  from sempy._utils._log import log
@@ -61,7 +60,7 @@ def direct_lake_schema_compare(
              f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake mode."
          )

-     dfT = list_tables(dataset, workspace)
+     dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
      dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
      lc = get_lakehouse_columns(lakehouse_name, lakehouse_workspace)

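The hunks above (and several below) replace the package's internal list_tables helper with sempy's built-in fabric.list_tables, which likewise returns a pandas DataFrame of the model's tables. A short sketch of the replacement call, with placeholder dataset and workspace names:

import sempy.fabric as fabric

# One row per table in the semantic model; the result includes a "Type"
# column, which some callers rename (see the migration hunks below).
dfT = fabric.list_tables(dataset="Sales", workspace="My Workspace")
print(dfT.head())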
sempy_labs/directlake/_directlake_schema_sync.py
@@ -32,7 +32,6 @@ def direct_lake_schema_sync(

      sempy.fabric._client._utils._init_analysis_services()
      import Microsoft.AnalysisServices.Tabular as TOM
-     import System

      if "lakehouse" in kwargs:
          print(
@@ -99,7 +98,7 @@ def direct_lake_schema_sync(
                  table_name=table_name,
                  column_name=lakeCName,
                  source_column=lakeCName,
-                 data_type=System.Enum.Parse(TOM.DataType, dt),
+                 data_type=dt,
              )
              print(
                  f"{icons.green_dot} The '{lakeCName}' column in the '{lakeTName}' lakehouse table was added to the '{dataset}' semantic model within the '{workspace}' workspace."
sempy_labs/_generate_semantic_model.py
@@ -126,18 +126,15 @@ def generate_direct_lake_semantic_model(
      expr = get_shared_expression(lakehouse=lakehouse, workspace=lakehouse_workspace)
      dfD = fabric.list_datasets(workspace=workspace)
      dfD_filt = dfD[dfD["Dataset Name"] == dataset]
-     dfD_filt_len = len(dfD_filt)

-     if dfD_filt_len > 0 and overwrite is False:
+     if len(dfD_filt) > 0 and not overwrite:
          raise ValueError(
              f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace already exists. Overwrite is set to False so the new semantic model has not been created."
          )
-     if dfD_filt_len > 0 and overwrite:
-         print(
-             f"{icons.warning} Overwriting the existing '{dataset}' semantic model within the '{workspace}' workspace."
-         )

-     create_blank_semantic_model(dataset=dataset, workspace=workspace)
+     create_blank_semantic_model(
+         dataset=dataset, workspace=workspace, overwrite=overwrite
+     )

      @retry(
          sleep_time=1,
sempy_labs/directlake/_generate_shared_expression.py
@@ -0,0 +1,85 @@
+ import sempy.fabric as fabric
+ from sempy_labs._helper_functions import (
+     resolve_lakehouse_name,
+     resolve_lakehouse_id,
+     resolve_warehouse_id,
+ )
+ from typing import Optional
+ import sempy_labs._icons as icons
+ from sempy.fabric.exceptions import FabricHTTPException
+
+
+ def generate_shared_expression(
+     item_name: Optional[str] = None,
+     item_type: Optional[str] = "Lakehouse",
+     workspace: Optional[str] = None,
+ ) -> str:
+     """
+     Dynamically generates the M expression used by a Direct Lake model for a given lakehouse/warehouse.
+
+     Parameters
+     ----------
+     item_name : str, default=None
+         The Fabric lakehouse or warehouse name.
+         Defaults to None which resolves to the lakehouse attached to the notebook.
+     item_type : str, default="Lakehouse"
+         The Fabric item name. Valid options: 'Lakehouse', 'Warehouse'.
+     workspace : str, default=None
+         The Fabric workspace used by the item.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     str
+         Shows the expression which can be used to connect a Direct Lake semantic model to its SQL Endpoint.
+     """
+
+     workspace = fabric.resolve_workspace_name(workspace)
+     workspace_id = fabric.resolve_workspace_id(workspace)
+     item_types = ["Lakehouse", "Warehouse"]
+     item_type = item_type.capitalize()
+     if item_type not in item_types:
+         raise ValueError(
+             f"{icons.red_dot} Invalid item type. Valid options: {item_types}."
+         )
+
+     if item_name is None:
+         item_id = fabric.get_lakehouse_id()
+         item_name = resolve_lakehouse_name(item_id, workspace)
+     elif item_name is not None and item_type == "Lakehouse":
+         item_id = resolve_lakehouse_id(lakehouse=item_name, workspace=workspace)
+     elif item_type == "Warehouse":
+         item_id = resolve_warehouse_id(warehouse=item_name, workspace=workspace)
+
+     client = fabric.FabricRestClient()
+     item_type_rest = f"{item_type.lower()}s"
+     response = client.get(f"/v1/workspaces/{workspace_id}/{item_type_rest}/{item_id}")
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     if item_type == "Lakehouse":
+         prop = response.json()["properties"]["sqlEndpointProperties"]
+         sqlEPCS = prop["connectionString"]
+         sqlepid = prop["id"]
+         provStatus = prop["provisioningStatus"]
+     elif item_type == "Warehouse":
+         prop = response.json()["properties"]
+         sqlEPCS = prop["connectionString"]
+         sqlepid = item_id
+         provStatus = None
+
+     if provStatus == "InProgress":
+         raise ValueError(
+             f"{icons.red_dot} The SQL Endpoint for the '{item_name}' lakehouse within the '{workspace}' workspace has not yet been provisioned. Please wait until it has been provisioned."
+         )
+
+     sh = (
+         'let\n\tdatabase = Sql.Database("'
+         + sqlEPCS
+         + '", "'
+         + sqlepid
+         + '")\nin\n\tdatabase'
+     )
+
+     return sh
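
A sketch of calling the new helper, with placeholder item and workspace names; the returned string is the M expression a Direct Lake model uses to reach the item's SQL endpoint:

from sempy_labs.directlake import generate_shared_expression

expr = generate_shared_expression(
    item_name="MyLakehouse",
    item_type="Lakehouse",
    workspace="My Workspace",
)
print(expr)
# let
#     database = Sql.Database("<sql-endpoint-connection-string>", "<sql-endpoint-id>")
# in
#     database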
sempy_labs/directlake/_show_unsupported_directlake_objects.py
@@ -1,6 +1,5 @@
  import sempy.fabric as fabric
  import pandas as pd
- from sempy_labs._list_functions import list_tables
  from sempy_labs._helper_functions import format_dax_object_name
  from typing import Optional, Tuple
  from sempy._utils._log import log
@@ -33,7 +32,7 @@ def show_unsupported_direct_lake_objects(

      workspace = fabric.resolve_workspace_name(workspace)

-     dfT = list_tables(dataset, workspace)
+     dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
      dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
      dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)

sempy_labs/lakehouse/_get_lakehouse_tables.py
@@ -7,6 +7,7 @@ from sempy_labs._helper_functions import (
      resolve_lakehouse_id,
      resolve_lakehouse_name,
      resolve_workspace_name_and_id,
+     pagination,
  )
  from sempy_labs.directlake._guardrails import (
      get_sku_size,
@@ -52,8 +53,6 @@ def get_lakehouse_tables(
          Shows the tables/columns within a lakehouse and their properties.
      """

-     from sempy_labs._helper_functions import pagination
-
      df = pd.DataFrame(
          columns=[
              "Workspace Name",
@@ -96,6 +95,9 @@

      responses = pagination(client, response)

+     if not responses[0].get("data"):
+         return df
+
      dfs = []
      for r in responses:
          for i in r.get("data", []):
@@ -108,7 +110,9 @@
                  "Location": i.get("location"),
              }
              dfs.append(pd.DataFrame(new_data, index=[0]))
-         df = pd.concat(dfs, ignore_index=True)
+
+     if dfs:
+         df = pd.concat(dfs, ignore_index=True)

      if extended:
          sku_value = get_sku_size(workspace)
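
Both new guards above (the early return and the `if dfs:` check) protect against a lakehouse with no tables: pandas refuses to concatenate an empty list, so the old unconditional pd.concat would raise. A one-liner showing the failure mode the guards avoid:

import pandas as pd

try:
    pd.concat([], ignore_index=True)
except ValueError as e:
    print(e)  # "No objects to concatenate"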
sempy_labs/migration/_migrate_calctables_to_lakehouse.py
@@ -52,6 +52,11 @@ def migrate_calc_tables_to_lakehouse(
          or if no lakehouse attached, resolves to the workspace of the notebook.
      """

+     if dataset == new_dataset:
+         raise ValueError(
+             f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
+         )
+
      workspace = fabric.resolve_workspace_name(workspace)

      if new_dataset_workspace is None:
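
The same dataset/new_dataset guard is added to each migration function in this release. A quick illustration of the call it now rejects; the function's other parameters are omitted and the dataset name is a placeholder:

from sempy_labs.migration import migrate_calc_tables_to_lakehouse

try:
    # Source and target deliberately set to the same model name.
    migrate_calc_tables_to_lakehouse(dataset="Sales", new_dataset="Sales")
except ValueError as e:
    print(e)  # ... These parameters must be set to different values.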
sempy_labs/migration/_migrate_calctables_to_semantic_model.py
@@ -48,6 +48,11 @@ def migrate_calc_tables_to_semantic_model(
          or if no lakehouse attached, resolves to the workspace of the notebook.
      """

+     if dataset == new_dataset:
+         raise ValueError(
+             f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
+         )
+
      workspace = fabric.resolve_workspace_name(workspace)

      if new_dataset_workspace is None:
sempy_labs/migration/_migrate_model_objects_to_semantic_model.py
@@ -1,7 +1,6 @@
  import sempy
  import sempy.fabric as fabric
  import re
- from sempy_labs._list_functions import list_tables
  from sempy_labs._helper_functions import (
      create_relationship_name,
      retry,
@@ -43,12 +42,17 @@ def migrate_model_objects_to_semantic_model(
      import Microsoft.AnalysisServices.Tabular as TOM
      import System

+     if dataset == new_dataset:
+         raise ValueError(
+             f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
+         )
+
      workspace = fabric.resolve_workspace_name(workspace)

      if new_dataset_workspace is None:
          new_dataset_workspace = workspace

-     dfT = list_tables(dataset, workspace)
+     dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
      dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
      dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
      dfRole = fabric.get_roles(dataset=dataset, workspace=workspace)
sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py
@@ -1,8 +1,5 @@
  import sempy.fabric as fabric
  import pandas as pd
- import datetime
- import time
- from sempy_labs._list_functions import list_tables
  from sempy_labs.directlake._get_shared_expression import get_shared_expression
  from sempy_labs._helper_functions import resolve_lakehouse_name, retry
  from sempy_labs.lakehouse._lakehouse import lakehouse_attached
@@ -45,9 +42,13 @@ def migrate_tables_columns_to_semantic_model(
          The Fabric workspace used by the lakehouse.
          Defaults to None which resolves to the workspace of the attached lakehouse
          or if no lakehouse attached, resolves to the workspace of the notebook.
-
      """

+     if dataset == new_dataset:
+         raise ValueError(
+             f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
+         )
+
      workspace = fabric.resolve_workspace_name(workspace)

      if new_dataset_workspace is None:
@@ -71,7 +72,7 @@ def migrate_tables_columns_to_semantic_model(
      shEx = get_shared_expression(lakehouse, lakehouse_workspace)

      dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
-     dfT = list_tables(dataset, workspace)
+     dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
      dfT.rename(columns={"Type": "Table Type"}, inplace=True)
      dfC = pd.merge(
          dfC,
sempy_labs/migration/_migration_validation.py
@@ -3,6 +3,7 @@ import pandas as pd
  from typing import Optional
  from sempy_labs._list_functions import list_semantic_model_objects
  from sempy._utils._log import log
+ import sempy_labs._icons as icons


  @log
@@ -36,6 +37,11 @@ def migration_validation(
          A pandas dataframe showing a list of objects and whether they were successfully migrated. Also shows the % of objects which were migrated successfully.
      """

+     if dataset == new_dataset:
+         raise ValueError(
+             f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
+         )
+
      workspace = fabric.resolve_workspace_name(workspace)
      if new_dataset_workspace is None:
          new_dataset_workspace = workspace
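
Finally, a hedged sketch of running the validation end to end; the dataset and workspace names are placeholders, and with the new guard the dataset and new_dataset arguments must differ:

from sempy_labs.migration import migration_validation

dfV = migration_validation(
    dataset="Sales",
    new_dataset="Sales DL",
    workspace="My Workspace",
)
print(dfV.head())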