semantic-link-labs 0.8.3-py3-none-any.whl → 0.8.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.
Files changed (101)
  1. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.4.dist-info}/METADATA +33 -8
  2. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.4.dist-info}/RECORD +101 -98
  3. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.4.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +24 -0
  5. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
  6. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
  7. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
  8. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
  9. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
  10. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
  11. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
  12. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
  13. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
  14. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
  15. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
  16. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
  17. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
  18. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
  19. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
  20. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
  21. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
  22. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
  23. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
  24. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
  25. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
  26. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
  27. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
  28. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
  29. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
  30. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
  31. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
  32. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
  33. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
  34. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
  35. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
  36. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
  37. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
  38. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
  39. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
  40. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
  41. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
  42. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
  43. sempy_labs/_capacities.py +49 -14
  44. sempy_labs/_capacity_migration.py +1 -7
  45. sempy_labs/_data_pipelines.py +6 -0
  46. sempy_labs/_dataflows.py +4 -0
  47. sempy_labs/_deployment_pipelines.py +13 -7
  48. sempy_labs/_environments.py +6 -0
  49. sempy_labs/_eventhouses.py +6 -0
  50. sempy_labs/_eventstreams.py +6 -0
  51. sempy_labs/_external_data_shares.py +6 -4
  52. sempy_labs/_generate_semantic_model.py +26 -3
  53. sempy_labs/_git.py +14 -14
  54. sempy_labs/_helper_functions.py +172 -0
  55. sempy_labs/_icons.py +55 -22
  56. sempy_labs/_kql_databases.py +6 -0
  57. sempy_labs/_kql_querysets.py +6 -0
  58. sempy_labs/_list_functions.py +1 -1
  59. sempy_labs/_managed_private_endpoints.py +166 -0
  60. sempy_labs/_mirrored_warehouses.py +2 -0
  61. sempy_labs/_ml_experiments.py +6 -0
  62. sempy_labs/_ml_models.py +6 -0
  63. sempy_labs/_model_bpa.py +6 -1
  64. sempy_labs/_model_bpa_bulk.py +11 -25
  65. sempy_labs/_model_bpa_rules.py +8 -3
  66. sempy_labs/_notebooks.py +107 -12
  67. sempy_labs/_query_scale_out.py +8 -6
  68. sempy_labs/_refresh_semantic_model.py +299 -49
  69. sempy_labs/_spark.py +12 -5
  70. sempy_labs/_translations.py +2 -0
  71. sempy_labs/_vertipaq.py +58 -67
  72. sempy_labs/_warehouses.py +79 -0
  73. sempy_labs/_workloads.py +128 -0
  74. sempy_labs/_workspace_identity.py +4 -4
  75. sempy_labs/_workspaces.py +14 -1
  76. sempy_labs/admin/_basic_functions.py +85 -43
  77. sempy_labs/admin/_domains.py +18 -18
  78. sempy_labs/directlake/__init__.py +2 -0
  79. sempy_labs/directlake/_directlake_schema_sync.py +2 -1
  80. sempy_labs/directlake/_dl_helper.py +4 -1
  81. sempy_labs/directlake/_get_shared_expression.py +7 -1
  82. sempy_labs/directlake/_guardrails.py +2 -1
  83. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -7
  84. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
  85. sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
  86. sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
  87. sempy_labs/lakehouse/_shortcuts.py +4 -0
  88. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
  89. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
  90. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
  91. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
  92. sempy_labs/migration/_migration_validation.py +2 -0
  93. sempy_labs/migration/_refresh_calc_tables.py +1 -0
  94. sempy_labs/report/__init__.py +4 -1
  95. sempy_labs/report/_generate_report.py +6 -0
  96. sempy_labs/report/_paginated.py +74 -0
  97. sempy_labs/report/_report_functions.py +6 -0
  98. sempy_labs/report/_report_rebind.py +2 -0
  99. sempy_labs/tom/_model.py +64 -33
  100. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.4.dist-info}/LICENSE +0 -0
  101. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.4.dist-info}/top_level.txt +0 -0
sempy_labs/admin/_domains.py

@@ -34,6 +34,8 @@ def list_domains(non_empty_only: bool = False) -> pd.DataFrame:
     """
     Shows a list of domains.
 
+    This is a wrapper function for the following API: `Domains - List Domains <https://learn.microsoft.com/rest/api/fabric/admin/domains/list-domains>`_.
+
     Parameters
     ----------
     non_empty_only : bool, default=False

@@ -46,8 +48,6 @@ def list_domains(non_empty_only: bool = False) -> pd.DataFrame:
         A pandas dataframe showing a list of the domains.
     """
 
-    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/list-domains?tabs=HTTP
-
     df = pd.DataFrame(
         columns=[
             "Domain ID",

@@ -84,6 +84,8 @@ def list_domain_workspaces(domain_name: str) -> pd.DataFrame:
     """
     Shows a list of workspaces within the domain.
 
+    This is a wrapper function for the following API: `Domains - List Domain Workspaces <https://learn.microsoft.com/rest/api/fabric/admin/domains/list-domain-workspaces?tabs=HTTP>`_.
+
     Parameters
     ----------
     domain_name : str

@@ -95,8 +97,6 @@ def list_domain_workspaces(domain_name: str) -> pd.DataFrame:
         A pandas dataframe showing a list of workspaces within the domain.
     """
 
-    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/list-domain-workspaces?tabs=HTTP
-
     domain_id = resolve_domain_id(domain_name)
 
     df = pd.DataFrame(columns=["Workspace ID", "Workspace Name"])

@@ -125,6 +125,8 @@ def create_domain(
     """
     Creates a new domain.
 
+    This is a wrapper function for the following API: `Domains - Create Domain <https://learn.microsoft.com/rest/api/fabric/admin/domains/create-domain>`_.
+
     Parameters
     ----------
     domain_name : str

@@ -135,8 +137,6 @@ def create_domain(
         The parent domain name.
     """
 
-    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/create-domain?tabs=HTTP
-
    if parent_domain_name is not None:
        parent_domain_id = resolve_domain_id(parent_domain_name)

@@ -160,14 +160,14 @@ def delete_domain(domain_name: str):
     """
     Deletes a domain.
 
+    This is a wrapper function for the following API: `Domains - Delete Domain <https://learn.microsoft.com/rest/api/fabric/admin/domains/delete-domain>`_.
+
     Parameters
     ----------
     domain_name : str
         The domain name.
     """
 
-    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/delete-domain?tabs=HTTP
-
     domain_id = resolve_domain_id(domain_name)
 
     client = fabric.FabricRestClient()

@@ -187,6 +187,8 @@ def update_domain(
     """
     Updates a domain's properties.
 
+    This is a wrapper function for the following API: `Domains - Update Domain <https://learn.microsoft.com/rest/api/fabric/admin/domains/update-domain>`_.
+
     Parameters
     ----------
     domain_name : str

@@ -197,8 +199,6 @@ def update_domain(
         The domain `contributor scope <https://learn.microsoft.com/rest/api/fabric/admin/domains/update-domain?tabs=HTTP#contributorsscopetype>`_.
     """
 
-    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/update-domain?tabs=HTTP
-
     contributors_scopes = ["AdminsOnly", "AllTenant", "SpecificUsersAndGroups"]
 
     if contributors_scope not in contributors_scopes:

@@ -231,6 +231,8 @@ def assign_domain_workspaces_by_capacities(
     """
     Assigns all workspaces that reside on the specified capacities to the specified domain.
 
+    This is a wrapper function for the following API: `Domains - Assign Domain Workspaces By Capacities <https://learn.microsoft.com/rest/api/fabric/admin/domains/assign-domain-workspaces-by-capacities>`_.
+
     Parameters
     ----------
     domain_name : str

@@ -239,8 +241,6 @@ def assign_domain_workspaces_by_capacities(
         The capacity names.
     """
 
-    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/assign-domain-workspaces-by-capacities?tabs=HTTP
-
     from sempy_labs.admin import list_capacities
 
     domain_id = resolve_domain_id(domain_name)

@@ -287,6 +287,8 @@ def assign_domain_workspaces(domain_name: str, workspace_names: str | List[str])
     """
     Assigns workspaces to the specified domain by workspace.
 
+    This is a wrapper function for the following API: `Domains - Assign Domain Workspaces By Ids <https://learn.microsoft.com/rest/api/fabric/admin/domains/assign-domain-workspaces-by-ids>`_.
+
     Parameters
     ----------
     domain_name : str

@@ -295,8 +297,6 @@ def assign_domain_workspaces(domain_name: str, workspace_names: str | List[str])
         The Fabric workspace(s).
     """
 
-    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/assign-domain-workspaces-by-ids?tabs=HTTP
-
     domain_id = resolve_domain_id(domain_name=domain_name)
 
     if isinstance(workspace_names, str):

@@ -340,14 +340,14 @@ def unassign_all_domain_workspaces(domain_name: str):
     """
     Unassigns all workspaces from the specified domain.
 
+    This is a wrapper function for the following API: `Domains - Unassign All Domain Workspaces <https://learn.microsoft.com/rest/api/fabric/admin/domains/unassign-all-domain-workspaces>`_.
+
     Parameters
     ----------
     domain_name : str
         The domain name.
     """
 
-    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/unassign-all-domain-workspaces?tabs=HTTP
-
     domain_id = resolve_domain_id(domain_name=domain_name)
 
     client = fabric.FabricRestClient()

@@ -364,6 +364,8 @@ def unassign_domain_workspaces(domain_name: str, workspace_names: str | List[str])
     """
     Unassigns workspaces from the specified domain by workspace.
 
+    This is a wrapper function for the following API: `Domains - Unassign Domain Workspaces By Ids <https://learn.microsoft.com/rest/api/fabric/admin/domains/unassign-domain-workspaces-by-ids>`_.
+
     Parameters
     ----------
     domain_name : str

@@ -372,8 +374,6 @@ def unassign_domain_workspaces(domain_name: str, workspace_names: str | List[str])
         The Fabric workspace(s).
     """
 
-    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/unassign-domain-workspaces-by-ids?tabs=HTTP
-
     domain_id = resolve_domain_id(domain_name=domain_name)
 
     if isinstance(workspace_names, str):
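
The hunks above only move documentation (inline comment links become wrapper notes in the docstrings); the functions themselves are thin wrappers over the Fabric Admin REST API. For orientation, a minimal usage sketch, not part of the diff: it assumes these functions are re-exported from sempy_labs.admin, as the `from sempy_labs.admin import list_capacities` line above suggests, and all domain/workspace names are placeholders.

import sempy_labs.admin as admin

# Enumerate domains, then the workspaces assigned to one of them.
df_domains = admin.list_domains(non_empty_only=False)
df_workspaces = admin.list_domain_workspaces(domain_name="Finance")

# Create a child domain, assign workspaces to it by name, then clean up.
admin.create_domain(domain_name="Finance EU", parent_domain_name="Finance")
admin.assign_domain_workspaces(domain_name="Finance EU", workspace_names=["Sales EU", "Ops EU"])
admin.unassign_all_domain_workspaces(domain_name="Finance EU")
admin.delete_domain(domain_name="Finance EU")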
sempy_labs/directlake/__init__.py

@@ -21,6 +21,7 @@ from sempy_labs.directlake._show_unsupported_directlake_objects import (
 )
 from sempy_labs.directlake._update_directlake_model_lakehouse_connection import (
     update_direct_lake_model_lakehouse_connection,
+    update_direct_lake_model_connection,
 )
 from sempy_labs.directlake._update_directlake_partition_entity import (
     update_direct_lake_partition_entity,

@@ -50,4 +51,5 @@ __all__ = [
     "add_table_to_direct_lake_semantic_model",
     "generate_direct_lake_semantic_model",
     "get_direct_lake_source",
+    "update_direct_lake_model_connection",
 ]
sempy_labs/directlake/_directlake_schema_sync.py

@@ -3,6 +3,7 @@ import sempy.fabric as fabric
 from sempy_labs.lakehouse import get_lakehouse_columns
 from sempy_labs.directlake._dl_helper import get_direct_lake_source
 from sempy_labs.tom import connect_semantic_model
+from sempy_labs._helper_functions import _convert_data_type
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons

@@ -88,7 +89,7 @@ def direct_lake_schema_sync(
                 f"{icons.yellow_dot} The '{lakeCName}' column exists in the '{lakeTName}' lakehouse table but not in the '{dataset}' semantic model within the '{workspace}' workspace."
             )
             if add_to_model:
-                dt = icons.data_type_mapping.get(dType)
+                dt = _convert_data_type(dType)
                 tom.add_data_column(
                     table_name=table_name,
                     column_name=lakeCName,
sempy_labs/directlake/_dl_helper.py

@@ -9,6 +9,7 @@ from sempy_labs._helper_functions import (
     retry,
     resolve_dataset_id,
     resolve_lakehouse_name,
+    _convert_data_type,
 )
 
 

@@ -130,6 +131,8 @@ def generate_direct_lake_semantic_model(
 
     dfLT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lakehouse_workspace)
 
+    icons.sll_tags.append("GenerateDLModel")
+
     # Validate lakehouse tables
     for t in lakehouse_tables:
         if t not in dfLT["Table Name"].values:

@@ -178,7 +181,7 @@ def generate_direct_lake_semantic_model(
         for i, r in dfLC_filt.iterrows():
             lakeCName = r["Column Name"]
             dType = r["Data Type"]
-            dt = icons.data_type_mapping.get(dType)
+            dt = _convert_data_type(dType)
             tom.add_data_column(
                 table_name=t,
                 column_name=lakeCName,
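
Several hunks in this release (here and in _directlake_schema_sync.py and _update_directlake_partition_entity.py) replace direct lookups into icons.data_type_mapping with a new _convert_data_type helper from sempy_labs/_helper_functions.py (+172 lines, body not shown in this diff). A purely hypothetical sketch of what such a centralized converter could look like; the mapping contents and error behavior below are assumptions, not the released code:

# Hypothetical reconstruction; the real helper lives in
# sempy_labs/_helper_functions.py, whose body this diff does not show.
_DATA_TYPE_MAPPING = {
    "string": "String",
    "bigint": "Int64",
    "int": "Int64",
    "double": "Double",
    "boolean": "Boolean",
    "timestamp": "DateTime",
    "date": "DateTime",
}


def _convert_data_type(data_type: str) -> str:
    # Centralizing the lookup lets every caller fail loudly on an
    # unmapped type instead of passing None to tom.add_data_column.
    dt = _DATA_TYPE_MAPPING.get(data_type)
    if dt is None:
        raise ValueError(f"Unsupported lakehouse data type: '{data_type}'")
    return dt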
sempy_labs/directlake/_get_shared_expression.py

@@ -30,7 +30,7 @@ def get_shared_expression(
     workspace = fabric.resolve_workspace_name(workspace)
     if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id)
+        lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
 
     dfL = list_lakehouses(workspace=workspace)
     lakeDetail = dfL[dfL["Lakehouse Name"] == lakehouse]

@@ -39,6 +39,12 @@ def get_shared_expression(
     sqlepid = lakeDetail["SQL Endpoint ID"].iloc[0]
     provStatus = lakeDetail["SQL Endpoint Provisioning Status"].iloc[0]
 
+    parts = sqlEPCS.split(".", 1)
+    if parts:
+        parts[0] = parts[0].upper()
+
+    sqlEPCS = ".".join(parts)
+
     if provStatus == "InProgress":
         raise ValueError(
             f"{icons.red_dot} The SQL Endpoint for the '{lakehouse}' lakehouse within the '{workspace}' workspace has not yet been provisioned. Please wait until it has been provisioned."
sempy_labs/directlake/_guardrails.py

@@ -81,6 +81,7 @@ def get_directlake_guardrails_for_sku(sku_size: str) -> pd.DataFrame:
     """
 
     df = get_direct_lake_guardrails()
-    filtered_df = df[df["Fabric SKUs"] == sku_size]
+    col_name = df.columns[0]
+    filtered_df = df[df[col_name] == sku_size]
 
     return filtered_df
sempy_labs/directlake/_show_unsupported_directlake_objects.py

@@ -59,13 +59,7 @@ def show_unsupported_direct_lake_objects(
     dfR["From Column Data Type"] = merged_from["Data Type"]
     dfR["To Column Data Type"] = merged_to["Data Type"]
 
-    dfR_filt = dfR[
-        (
-            (dfR["From Column Data Type"] == "DateTime")
-            | (dfR["To Column Data Type"] == "DateTime")
-        )
-        | (dfR["From Column Data Type"] != dfR["To Column Data Type"])
-    ]
+    dfR_filt = dfR[(dfR["From Column Data Type"] != dfR["To Column Data Type"])]
     r = dfR_filt[
         [
             "From Table",
sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py

@@ -1,5 +1,6 @@
 import sempy.fabric as fabric
 from sempy_labs.directlake._get_shared_expression import get_shared_expression
+from sempy_labs.directlake._generate_shared_expression import generate_shared_expression
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
 )

@@ -53,6 +54,8 @@ def update_direct_lake_model_lakehouse_connection(
             f"Therefore it cannot be used to support the '{dataset}' semantic model within the '{workspace}' workspace."
         )
 
+    icons.sll_tags.append("UpdateDLConnection")
+
     shEx = get_shared_expression(lakehouse, lakehouse_workspace)
 
     with connect_semantic_model(

@@ -69,3 +72,78 @@ def update_direct_lake_model_lakehouse_connection(
         print(
             f"{icons.green_dot} The expression in the '{dataset}' semantic model has been updated to point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace."
         )
+
+
+def update_direct_lake_model_connection(
+    dataset: str,
+    workspace: Optional[str] = None,
+    source: Optional[str] = None,
+    source_type: Optional[str] = "Lakehouse",
+    source_workspace: Optional[str] = None,
+):
+    """
+    Remaps a Direct Lake semantic model's SQL Endpoint connection to a new lakehouse/warehouse.
+
+    Parameters
+    ----------
+    dataset : str
+        Name of the semantic model.
+    workspace : str, default=None
+        The Fabric workspace name in which the semantic model exists.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    source : str, default=None
+        The name of the Fabric lakehouse/warehouse used by the Direct Lake semantic model.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    source_type : str, default="Lakehouse"
+        The type of source for the Direct Lake semantic model. Valid options: "Lakehouse", "Warehouse".
+    source_workspace : str, default=None
+        The Fabric workspace used by the lakehouse/warehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    if workspace is None:
+        workspace = fabric.resolve_workspace_name(workspace)
+
+    source_type = source_type.capitalize()
+
+    if source_type not in ["Lakehouse", "Warehouse"]:
+        raise ValueError(
+            f"{icons.red_dot} The 'source_type' must be either 'Lakehouse' or 'Warehouse'."
+        )
+
+    if source_workspace is None:
+        source_workspace = workspace
+
+    if source is None:
+        source_id = fabric.get_lakehouse_id()
+        source = resolve_lakehouse_name(source_id, source_workspace)
+    else:
+        source_id = fabric.resolve_item_id(
+            item_name=source, type=source_type, workspace=source_workspace
+        )
+        source = fabric.resolve_item_name(
+            item_id=source_id, workspace=source_workspace, type=source_type
+        )
+
+    icons.sll_tags.append("UpdateDLConnection")
+
+    shEx = generate_shared_expression(
+        item_name=source, item_type=source_type, workspace=source_workspace
+    )
+
+    with connect_semantic_model(
+        dataset=dataset, readonly=False, workspace=workspace
+    ) as tom:
+
+        if not tom.is_direct_lake():
+            raise ValueError(
+                f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
+            )
+
+        tom.model.Expressions["DatabaseQuery"].Expression = shEx
+
+        print(
+            f"{icons.green_dot} The expression in the '{dataset}' semantic model has been updated to point to the '{source}' {source_type.lower()} in the '{source_workspace}' workspace."
+        )
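
A minimal usage sketch for the new function, not part of the diff; the dataset, workspace, and source names below are placeholders:

from sempy_labs.directlake import update_direct_lake_model_connection

# Repoint an existing Direct Lake model at a warehouse in another workspace.
update_direct_lake_model_connection(
    dataset="Sales Model",
    workspace="Analytics",
    source="SalesWarehouse",
    source_type="Warehouse",
    source_workspace="Data Engineering",
)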
sempy_labs/directlake/_update_directlake_partition_entity.py

@@ -3,6 +3,7 @@ import sempy.fabric as fabric
 from sempy_labs.tom import connect_semantic_model
 from sempy_labs._refresh_semantic_model import refresh_semantic_model
 from sempy_labs.directlake._dl_helper import get_direct_lake_source
+from sempy_labs._helper_functions import _convert_data_type
 from typing import List, Optional, Union
 import sempy_labs._icons as icons
 

@@ -12,7 +13,6 @@ def update_direct_lake_partition_entity(
     table_name: Union[str, List[str]],
     entity_name: Union[str, List[str]],
     workspace: Optional[str] = None,
-    **kwargs,
 ):
     """
     Remaps a table (or tables) in a Direct Lake semantic model to a table in a lakehouse.

@@ -31,28 +31,8 @@ def update_direct_lake_partition_entity(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    if "lakehouse" in kwargs:
-        print(
-            "The 'lakehouse' parameter has been deprecated as it is no longer necessary. Please remove this parameter from the function going forward."
-        )
-        del kwargs["lakehouse"]
-    if "lakehouse_workspace" in kwargs:
-        print(
-            "The 'lakehouse_workspace' parameter has been deprecated as it is no longer necessary. Please remove this parameter from the function going forward."
-        )
-        del kwargs["lakehouse_workspace"]
-
-    workspace = fabric.resolve_workspace_name(workspace)
-
-    artifact_type, lakehouse_name, lakehouse_id, lakehouse_workspace_id = (
-        get_direct_lake_source(dataset=dataset, workspace=workspace)
-    )
-
-    if artifact_type == "Warehouse":
-        raise ValueError(
-            f"{icons.red_dot} This function is only valid for Direct Lake semantic models which source from lakehouses, not warehouses."
-        )
-    lakehouse_workspace = fabric.resolve_workspace_name(lakehouse_workspace_id)
+    if workspace is None:
+        workspace = fabric.resolve_workspace_name(workspace)
 
     # Support both str & list types
     if isinstance(table_name, str):

@@ -65,6 +45,8 @@ def update_direct_lake_partition_entity(
             f"{icons.red_dot} The 'table_name' and 'entity_name' arrays must be of equal length."
         )
 
+    icons.sll_tags.append("UpdateDLPartition")
+
     with connect_semantic_model(
         dataset=dataset, readonly=False, workspace=workspace
     ) as tom:

@@ -88,12 +70,11 @@ def update_direct_lake_partition_entity(
                 raise ValueError(
                     f"{icons.red_dot} The '{tName}' table in the '{dataset}' semantic model has not been updated."
                 )
-            else:
-                tom.model.Tables[tName].Partitions[part_name].EntityName = eName
-                print(
-                    f"{icons.green_dot} The '{tName}' table in the '{dataset}' semantic model has been updated to point to the '{eName}' table "
-                    f"in the '{lakehouse_name}' lakehouse within the '{lakehouse_workspace}' workspace."
-                )
+
+            tom.model.Tables[tName].Partitions[part_name].EntityName = eName
+            print(
+                f"{icons.green_dot} The '{tName}' table in the '{dataset}' semantic model has been updated to point to the '{eName}' table."
+            )
 
 
 def add_table_to_direct_lake_semantic_model(

@@ -149,9 +130,9 @@ def add_table_to_direct_lake_semantic_model(
 
         table_count = tom.model.Tables.Count
 
-        if tom.is_direct_lake() is False and table_count > 0:
+        if not tom.is_direct_lake() and table_count > 0:
             raise ValueError(
-                "This function is only valid for Direct Lake semantic models or semantic models with no tables."
+                f"{icons.red_dot} This function is only valid for Direct Lake semantic models or semantic models with no tables."
             )
 
         if any(

@@ -204,7 +185,7 @@ def add_table_to_direct_lake_semantic_model(
         for i, r in dfLC_filt.iterrows():
             lakeCName = r["Column Name"]
             dType = r["Data Type"]
-            dt = icons.data_type_mapping.get(dType)
+            dt = _convert_data_type(dType)
             tom.add_data_column(
                 table_name=table_name,
                 column_name=lakeCName,
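
With the deprecated **kwargs shim removed, a current-style call passes only the remaining parameters. A sketch with placeholder names, not part of the diff:

from sempy_labs.directlake import update_direct_lake_partition_entity

# Remap two model tables to renamed lakehouse tables in a single call;
# table_name and entity_name must be equal-length lists.
update_direct_lake_partition_entity(
    dataset="Sales Model",
    table_name=["DimDate", "FactSales"],
    entity_name=["dim_date", "fact_sales"],
)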
sempy_labs/lakehouse/_get_lakehouse_tables.py

@@ -31,12 +31,14 @@ def get_lakehouse_tables(
     """
     Shows the tables of a lakehouse and their respective properties. Option to include additional properties relevant to Direct Lake guardrails.
 
+    This is a wrapper function for the following API: `Tables - List Tables <https://learn.microsoft.com/rest/api/fabric/lakehouse/tables/list-tables`_ plus extended capabilities.
+
     Parameters
     ----------
     lakehouse : str, default=None
         The Fabric lakehouse.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-    lakehouse_workspace : str, default=None
+    workspace : str, default=None
         The Fabric workspace used by the lakehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -155,7 +157,9 @@ def get_lakehouse_tables(
     if extended:
         intColumns = ["Files", "Row Groups", "Table Size"]
         df[intColumns] = df[intColumns].astype(int)
-        df["SKU"] = guardrail["Fabric SKUs"].iloc[0]
+
+        col_name = guardrail.columns[0]
+        df["SKU"] = guardrail[col_name].iloc[0]
         df["Parquet File Guardrail"] = guardrail["Parquet files per table"].iloc[0]
         df["Row Group Guardrail"] = guardrail["Row groups per table"].iloc[0]
         df["Row Count Guardrail"] = (
sempy_labs/lakehouse/_shortcuts.py

@@ -20,6 +20,8 @@ def create_shortcut_onelake(
     """
     Creates a `shortcut <https://learn.microsoft.com/fabric/onelake/onelake-shortcuts>`_ to a delta table in OneLake.
 
+    This is a wrapper function for the following API: `OneLake Shortcuts - Create Shortcut <https://learn.microsoft.com/rest/api/fabric/core/onelake-shortcuts/create-shortcut`_.
+
     Parameters
     ----------
     table_name : str

@@ -169,6 +171,8 @@ def delete_shortcut(
     """
     Deletes a shortcut.
 
+    This is a wrapper function for the following API: `OneLake Shortcuts - Delete Shortcut <https://learn.microsoft.com/rest/api/fabric/core/onelake-shortcuts/delete-shortcut`_.
+
     Parameters
     ----------
     shortcut_name : str
sempy_labs/migration/_migrate_calctables_to_lakehouse.py

@@ -282,6 +282,8 @@ def migrate_field_parameters(
     if new_dataset_workspace is None:
         new_dataset_workspace = workspace
 
+    icons.sll_tags.append("DirectLakeMigration")
+
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfC["Column Object"] = format_dax_object_name(dfC["Table Name"], dfC["Column Name"])
     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)

@@ -337,6 +339,7 @@ def migrate_field_parameters(
         try:
             par = TOM.Partition()
             par.Name = tName
+            par.Mode = TOM.ModeType.Import
 
             parSource = TOM.CalculatedPartitionSource()
             par.Source = parSource

@@ -345,7 +348,6 @@ def migrate_field_parameters(
             tbl = TOM.Table()
             tbl.Name = tName
             tbl.LineageTag = generate_guid()
-            tbl.SourceLineageTag = generate_guid()
             tbl.Partitions.Add(par)
 
             columns = ["Value1", "Value2", "Value3"]

@@ -356,7 +358,6 @@ def migrate_field_parameters(
                 col.SourceColumn = "[" + colName + "]"
                 col.DataType = TOM.DataType.String
                 col.LineageTag = generate_guid()
-                col.SourceLineageTag = generate_guid()
 
                 tbl.Columns.Add(col)
 
sempy_labs/migration/_migrate_calctables_to_semantic_model.py

@@ -64,6 +64,8 @@ def migrate_calc_tables_to_semantic_model(
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
+    icons.sll_tags.append("DirectLakeMigration")
+
     # Get calc tables but not field parameters
     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
     dfP_filt = dfP[(dfP["Source Type"] == "Calculated")]
sempy_labs/migration/_migrate_model_objects_to_semantic_model.py

@@ -52,6 +52,8 @@ def migrate_model_objects_to_semantic_model(
     if new_dataset_workspace is None:
         new_dataset_workspace = workspace
 
+    icons.sll_tags.append("DirectLakeMigration")
+
     dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfM = fabric.list_measures(dataset=dataset, workspace=workspace)

@@ -334,14 +336,6 @@ def migrate_model_objects_to_semantic_model(
                 print(
                     f"{icons.warning} The {relName} relationship was not created as Direct Lake does not support relationships based on columns with different data types."
                 )
-            # Direct Lake using DateTime columns
-            elif isDirectLake and (
-                r.FromColumn.DataType == TOM.DataType.DateTime
-                or r.ToColumn.DataType == TOM.DataType.DateTime
-            ):
-                print(
-                    f"{icons.red_dot} The {relName} relationship was not created as Direct Lake does not support relationships based on columns of DateTime data type."
-                )
             # Columns do not exist in the new semantic model
             elif not any(
                 c.Name == r.FromColumn.Name and c.Parent.Name == r.FromTable.Name
sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py

@@ -61,6 +61,8 @@ def migrate_tables_columns_to_semantic_model(
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
+    icons.sll_tags.append("DirectLakeMigration")
+
     # Check that lakehouse is attached to the notebook
     if not lakehouse_attached() and (lakehouse is None and lakehouse_workspace is None):
         raise ValueError(

@@ -106,9 +108,24 @@ def migrate_tables_columns_to_semantic_model(
     with connect_semantic_model(
         dataset=new_dataset, readonly=False, workspace=new_dataset_workspace
     ) as tom:
+
+        # Additional updates
+        tom.set_annotation(
+            object=tom.model, name="__PBI_TimeIntelligenceEnabled", value="0"
+        )
+        tom.set_annotation(
+            object=tom.model, name="PBI_QueryOrder", value='["DatabaseQuery"]'
+        )
+
+        # Begin migration
         if not any(e.Name == "DatabaseQuery" for e in tom.model.Expressions):
             tom.add_expression("DatabaseQuery", expression=shEx)
             print(f"{icons.green_dot} The 'DatabaseQuery' expression has been added.")
+        tom.set_annotation(
+            object=tom.model.Expressions["DatabaseQuery"],
+            name="PBI_IncludeFutureArtifacts",
+            value="False",
+        )
 
         for i, r in dfT_filt.iterrows():
             tName = r["Name"]
sempy_labs/migration/_migration_validation.py

@@ -46,6 +46,8 @@ def migration_validation(
     if new_dataset_workspace is None:
         new_dataset_workspace = workspace
 
+    icons.sll_tags.append("DirectLakeMigration")
+
     dfA = list_semantic_model_objects(dataset=dataset, workspace=workspace)
     dfB = list_semantic_model_objects(
         dataset=new_dataset, workspace=new_dataset_workspace
sempy_labs/migration/_refresh_calc_tables.py

@@ -26,6 +26,7 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
 
     spark = SparkSession.builder.getOrCreate()
     workspace = fabric.resolve_workspace_name(workspace)
+    icons.sll_tags.append("DirectLakeMigration")
 
     @retry(
         sleep_time=1,
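
A recurring pattern in this release: many functions now prepend an icons.sll_tags.append(...) call ("GenerateDLModel", "UpdateDLConnection", "UpdateDLPartition", "DirectLakeMigration") before doing any work. The diff never shows how sll_tags is consumed; it appears to be a module-level list in sempy_labs/_icons.py used to tag operations, plausibly for usage telemetry. A purely hypothetical sketch of the pattern; the consumer function below is invented for illustration:

# In sempy_labs/_icons.py (known here only via its usage in this diff):
sll_tags = []

# Hypothetical consumer: fold accumulated tags into an activity string.
def _build_activity_name(base: str = "SemanticLinkLabs") -> str:
    unique = list(dict.fromkeys(sll_tags))  # de-duplicate, keep order
    return base if not unique else f"{base}/{'+'.join(unique)}"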
sempy_labs/report/__init__.py

@@ -1,7 +1,9 @@
 from sempy_labs.report._reportwrapper import (
     ReportWrapper,
 )
-
+from sempy_labs.report._paginated import (
+    get_report_datasources,
+)
 from sempy_labs.report._generate_report import (
     create_report_from_reportjson,
     get_report_definition,

@@ -39,4 +41,5 @@ __all__ = [
     "ReportWrapper",
     "report_bpa_rules",
     "run_report_bpa",
+    "get_report_datasources",
 ]
sempy_labs/report/_generate_report.py

@@ -23,6 +23,8 @@ def create_report_from_reportjson(
     """
     Creates a report based on a report.json file (and an optional themes.json file).
 
+    This is a wrapper function for the following API: `Items - Create Report <https://learn.microsoft.com/rest/api/fabric/report/items/create-report`_.
+
     Parameters
     ----------
     report : str

@@ -124,6 +126,8 @@ def update_report_from_reportjson(
     """
     Updates a report based on a report.json file.
 
+    This is a wrapper function for the following API: `Items - Update Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/update-report-definition`_.
+
     Parameters
     ----------
     report : str

@@ -179,6 +183,8 @@ def get_report_definition(report: str, workspace: Optional[str] = None) -> pd.DataFrame:
     """
     Gets the collection of definition files of a report.
 
+    This is a wrapper function for the following API: `Items - Get Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/get-report-definition`_.
+
     Parameters
     ----------
     report : str