semantic-link-labs 0.9.0-py3-none-any.whl → 0.9.2-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to the supported public registries, and is provided for informational purposes only.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (83)
  1. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/METADATA +68 -7
  2. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/RECORD +83 -76
  3. sempy_labs/__init__.py +14 -12
  4. sempy_labs/_authentication.py +0 -2
  5. sempy_labs/_capacities.py +120 -142
  6. sempy_labs/_capacity_migration.py +61 -94
  7. sempy_labs/_clear_cache.py +9 -8
  8. sempy_labs/_connections.py +72 -105
  9. sempy_labs/_data_pipelines.py +47 -49
  10. sempy_labs/_dataflows.py +45 -51
  11. sempy_labs/_dax.py +228 -6
  12. sempy_labs/_delta_analyzer.py +303 -0
  13. sempy_labs/_deployment_pipelines.py +72 -66
  14. sempy_labs/_environments.py +39 -36
  15. sempy_labs/_eventhouses.py +35 -35
  16. sempy_labs/_eventstreams.py +38 -39
  17. sempy_labs/_external_data_shares.py +29 -42
  18. sempy_labs/_gateways.py +57 -101
  19. sempy_labs/_generate_semantic_model.py +22 -30
  20. sempy_labs/_git.py +46 -66
  21. sempy_labs/_graphQL.py +95 -0
  22. sempy_labs/_helper_functions.py +175 -30
  23. sempy_labs/_job_scheduler.py +47 -59
  24. sempy_labs/_kql_databases.py +27 -34
  25. sempy_labs/_kql_querysets.py +23 -30
  26. sempy_labs/_list_functions.py +262 -164
  27. sempy_labs/_managed_private_endpoints.py +52 -47
  28. sempy_labs/_mirrored_databases.py +110 -134
  29. sempy_labs/_mirrored_warehouses.py +13 -13
  30. sempy_labs/_ml_experiments.py +36 -36
  31. sempy_labs/_ml_models.py +37 -38
  32. sempy_labs/_model_dependencies.py +2 -0
  33. sempy_labs/_notebooks.py +28 -29
  34. sempy_labs/_one_lake_integration.py +2 -0
  35. sempy_labs/_query_scale_out.py +63 -81
  36. sempy_labs/_refresh_semantic_model.py +12 -14
  37. sempy_labs/_spark.py +54 -79
  38. sempy_labs/_sql.py +7 -11
  39. sempy_labs/_vertipaq.py +8 -3
  40. sempy_labs/_warehouses.py +30 -33
  41. sempy_labs/_workloads.py +15 -20
  42. sempy_labs/_workspace_identity.py +13 -17
  43. sempy_labs/_workspaces.py +49 -48
  44. sempy_labs/admin/__init__.py +2 -0
  45. sempy_labs/admin/_basic_functions.py +244 -281
  46. sempy_labs/admin/_domains.py +188 -103
  47. sempy_labs/admin/_external_data_share.py +26 -31
  48. sempy_labs/admin/_git.py +17 -22
  49. sempy_labs/admin/_items.py +34 -48
  50. sempy_labs/admin/_scanner.py +20 -13
  51. sempy_labs/directlake/_directlake_schema_compare.py +2 -0
  52. sempy_labs/directlake/_dl_helper.py +10 -11
  53. sempy_labs/directlake/_generate_shared_expression.py +4 -5
  54. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -0
  55. sempy_labs/directlake/_list_directlake_model_calc_tables.py +1 -0
  56. sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -0
  57. sempy_labs/directlake/_warm_cache.py +2 -0
  58. sempy_labs/graph/__init__.py +33 -0
  59. sempy_labs/graph/_groups.py +402 -0
  60. sempy_labs/graph/_teams.py +113 -0
  61. sempy_labs/graph/_users.py +191 -0
  62. sempy_labs/lakehouse/__init__.py +4 -0
  63. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -10
  64. sempy_labs/lakehouse/_get_lakehouse_tables.py +14 -20
  65. sempy_labs/lakehouse/_lakehouse.py +101 -4
  66. sempy_labs/lakehouse/_shortcuts.py +42 -20
  67. sempy_labs/migration/__init__.py +4 -0
  68. sempy_labs/migration/_direct_lake_to_import.py +66 -0
  69. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +1 -0
  70. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +1 -0
  71. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +1 -0
  72. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +2 -0
  73. sempy_labs/report/_download_report.py +8 -13
  74. sempy_labs/report/_generate_report.py +49 -46
  75. sempy_labs/report/_paginated.py +20 -26
  76. sempy_labs/report/_report_functions.py +50 -45
  77. sempy_labs/report/_report_list_functions.py +2 -0
  78. sempy_labs/report/_report_rebind.py +6 -10
  79. sempy_labs/report/_reportwrapper.py +187 -220
  80. sempy_labs/tom/_model.py +8 -5
  81. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/LICENSE +0 -0
  82. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/WHEEL +0 -0
  83. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/top_level.txt +0 -0

sempy_labs/lakehouse/__init__.py

@@ -4,12 +4,14 @@ from sempy_labs.lakehouse._lakehouse import (
     lakehouse_attached,
     optimize_lakehouse_tables,
     vacuum_lakehouse_tables,
+    run_table_maintenance,
 )
 
 from sempy_labs.lakehouse._shortcuts import (
     # create_shortcut,
     create_shortcut_onelake,
     delete_shortcut,
+    reset_shortcut_cache,
 )
 
 __all__ = [
@@ -21,4 +23,6 @@ __all__ = [
     "create_shortcut_onelake",
     "delete_shortcut",
     "vacuum_lakehouse_tables",
+    "reset_shortcut_cache",
+    "run_table_maintenance",
 ]

sempy_labs/lakehouse/_get_lakehouse_columns.py

@@ -4,6 +4,7 @@ from sempy_labs._helper_functions import (
     format_dax_object_name,
     resolve_workspace_name_and_id,
     resolve_lakehouse_name_and_id,
+    _create_dataframe,
 )
 from typing import Optional
 from sempy._utils._log import log
@@ -35,16 +36,15 @@ def get_lakehouse_columns(
     from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
     from delta import DeltaTable
 
-    df = pd.DataFrame(
-        columns=[
-            "Workspace Name",
-            "Lakehouse Name",
-            "Table Name",
-            "Column Name",
-            "Full Column Name",
-            "Data Type",
-        ]
-    )
+    columns = {
+        "Workspace Name": "string",
+        "Lakehouse Name": "string",
+        "Table Name": "string",
+        "Column Name": "string",
+        "Full Column Name": "string",
+        "Data Type": "string",
+    }
+    df = _create_dataframe(columns=columns)
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
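
The _create_dataframe helper imported above is not defined anywhere in this diff. A minimal sketch of what it presumably does, inferred only from the call sites here (an empty pandas DataFrame whose columns already carry the declared dtypes); this is an assumption, not the package's actual implementation:

    import pandas as pd

    def _create_dataframe(columns: dict) -> pd.DataFrame:
        # Assumed behavior: build an empty DataFrame whose columns already have
        # the requested dtypes, so later appends do not fall back to 'object'.
        return pd.DataFrame({name: pd.Series(dtype=dtype) for name, dtype in columns.items()})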

sempy_labs/lakehouse/_get_lakehouse_tables.py

@@ -7,8 +7,9 @@ from sempy_labs._helper_functions import (
     _get_column_aggregate,
     resolve_workspace_name_and_id,
     resolve_lakehouse_name_and_id,
-    pagination,
     save_as_delta_table,
+    _base_api,
+    _create_dataframe,
 )
 from sempy_labs.directlake._guardrails import (
     get_sku_size,
@@ -18,7 +19,6 @@ from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy._utils._log import log
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID
 
 
@@ -57,16 +57,15 @@ def get_lakehouse_tables(
     Shows the tables/columns within a lakehouse and their properties.
     """
 
-    df = pd.DataFrame(
-        columns=[
-            "Workspace Name",
-            "Lakehouse Name",
-            "Table Name",
-            "Format",
-            "Type",
-            "Location",
-        ]
-    )
+    columns = {
+        "Workspace Name": "string",
+        "Lakehouse Name": "string",
+        "Table Name": "string",
+        "Format": "string",
+        "Type": "string",
+        "Location": "string",
+    }
+    df = _create_dataframe(columns=columns)
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
@@ -86,16 +85,11 @@ def get_lakehouse_tables(
         "Count rows runs a spark query and cross-workspace spark queries are currently not supported."
     )
 
-    client = fabric.FabricRestClient()
-    response = client.get(
-        f"/v1/workspaces/{workspace_id}/lakehouses/{lakehouse_id}/tables"
+    responses = _base_api(
+        request=f"v1/workspaces/{workspace_id}/lakehouses/{lakehouse_id}/tables",
+        uses_pagination=True,
     )
 
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
-
     if not responses[0].get("data"):
         return df

sempy_labs/lakehouse/_lakehouse.py

@@ -2,6 +2,13 @@ from tqdm.auto import tqdm
 from typing import List, Optional, Union
 from sempy._utils._log import log
 from uuid import UUID
+from sempy_labs._helper_functions import (
+    _base_api,
+    resolve_lakehouse_name_and_id,
+    resolve_workspace_name_and_id,
+)
+import sempy_labs._icons as icons
+import re
 
 
 def lakehouse_attached() -> bool:
@@ -13,12 +20,12 @@ def lakehouse_attached() -> bool:
     bool
         Returns True if a lakehouse is attached to the notebook.
     """
-    from pyspark.sql import SparkSession
 
-    spark = SparkSession.builder.getOrCreate()
-    lakeId = spark.conf.get("trident.lakehouse.id")
+    from sempy_labs._helper_functions import _get_fabric_context_setting
+
+    lake_id = _get_fabric_context_setting(name="trident.lakehouse.id")
 
-    if len(lakeId) > 0:
+    if len(lake_id) > 0:
         return True
     else:
         return False
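
lakehouse_attached now reads the lakehouse id from the Fabric context setting instead of spinning up a Spark session. A minimal usage sketch as a guard before writing to the default lakehouse; the file path and message are illustrative:

    from sempy_labs.lakehouse import lakehouse_attached

    if not lakehouse_attached():
        raise ValueError("Attach a lakehouse to this notebook before writing files.")

    # Files written under /lakehouse/default/Files land in the attached lakehouse.
    with open("/lakehouse/default/Files/example.txt", "w") as f:
        f.write("hello")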
@@ -128,3 +135,93 @@ def vacuum_lakehouse_tables(
             deltaTable.vacuum()
         else:
             deltaTable.vacuum(retain_n_hours)
+
+
+def run_table_maintenance(
+    table_name: str,
+    optimize: bool = False,
+    v_order: bool = False,
+    vacuum: bool = False,
+    retention_period: Optional[str] = None,
+    schema: Optional[str] = None,
+    lakehouse: Optional[str | UUID] = None,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Runs table maintenance operations on the specified table within the lakehouse.
+
+    This is a wrapper function for the following API: `Background Jobs - Run On Demand Table Maintenance <https://learn.microsoft.com/rest/api/fabric/lakehouse/background-jobs/run-on-demand-table-maintenance>`_.
+
+    Parameters
+    ----------
+    table_name : str
+        Name of the delta table on which to run maintenance operations.
+    optimize : bool, default=False
+        If True, the `OPTIMIZE <https://docs.delta.io/latest/optimizations-oss.html>`_ function will be run on the table.
+    v_order : bool, default=False
+        If True, v-order will be enabled for the table.
+    vacuum : bool, default=False
+        If True, the `VACUUM <https://docs.delta.io/latest/delta-utility.html#remove-files-no-longer-referenced-by-a-delta-table>`_ function will be run on the table.
+    retention_period : str, default=None
+        If specified, the retention period for the vacuum operation. Must be in the 'd:hh:mm:ss' format.
+    schema : str, default=None
+        The schema of the tables within the lakehouse.
+    lakehouse : str | uuid.UUID, default=None
+        The Fabric lakehouse name or ID.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
+        lakehouse=lakehouse, workspace=workspace_id
+    )
+
+    if not optimize and not vacuum:
+        raise ValueError(
+            f"{icons.warning} At least one of 'optimize' or 'vacuum' must be set to True."
+        )
+    if not vacuum and retention_period is not None:
+        raise ValueError(
+            f"{icons.warning} The 'retention_period' parameter can only be set if 'vacuum' is set to True."
+        )
+    if retention_period is not None:
+
+        def is_valid_format(time_string):
+            pattern = r"^\d+:[0-2][0-9]:[0-5][0-9]:[0-5][0-9]$"
+            return bool(re.match(pattern, time_string))
+
+        if not is_valid_format(retention_period):
+            raise ValueError(
+                f"{icons.red_dot} The 'retention_period' parameter must be in the 'd:hh:mm:ss' format."
+            )
+
+    payload = {
+        "executionData": {
+            "tableName": table_name,
+        }
+    }
+    if schema is not None:
+        payload["executionData"]["schemaName"] = schema
+    if optimize:
+        payload["executionData"]["optimizeSettings"] = {}
+    if v_order:
+        payload["executionData"]["optimizeSettings"] = {"vorder": True}
+    if vacuum:
+        payload["executionData"]["vacuumSettings"] = {}
+    if vacuum and retention_period is not None:
+        payload["executionData"]["vacuumSettings"]["retentionPeriod"] = retention_period
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/lakehouses/{lakehouse_id}/jobs/instances?jobType=TableMaintenance",
+        method="post",
+        payload=payload,
+        status_codes=202,
+    )
+
+    print(
+        f"{icons.green_dot} The table maintenance job for the '{table_name}' table in the '{lakehouse_name}' lakehouse within the '{workspace_name}' workspace has been initiated."
+    )
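
A hedged usage sketch for the new run_table_maintenance, following the docstring and validation logic above; the table name and retention value are illustrative:

    from sempy_labs.lakehouse import run_table_maintenance

    # OPTIMIZE with V-Order, then VACUUM keeping 7 days of history ('d:hh:mm:ss').
    run_table_maintenance(
        table_name="sales",
        optimize=True,
        v_order=True,
        vacuum=True,
        retention_period="7:00:00:00",
    )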

sempy_labs/lakehouse/_shortcuts.py

@@ -3,6 +3,7 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     resolve_lakehouse_id,
     resolve_workspace_name_and_id,
+    _base_api,
 )
 from typing import Optional
 import sempy_labs._icons as icons
@@ -71,37 +72,30 @@ def create_shortcut_onelake(
     if shortcut_name is None:
         shortcut_name = table_name
 
-    client = fabric.FabricRestClient()
-    tablePath = f"Tables/{table_name}"
+    table_path = f"Tables/{table_name}"
 
-    request_body = {
+    payload = {
         "path": "Tables",
         "name": shortcut_name.replace(" ", ""),
         "target": {
            "oneLake": {
                 "workspaceId": source_workspace_id,
                 "itemId": source_lakehouse_id,
-                "path": tablePath,
+                "path": table_path,
             }
         },
     }
 
-    try:
-        response = client.post(
-            f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts",
-            json=request_body,
-        )
-        if response.status_code == 201:
-            print(
-                f"{icons.green_dot} The shortcut '{shortcut_name}' was created in the '{destination_lakehouse_name}' lakehouse within"
-                f" the '{destination_workspace_name} workspace. It is based on the '{table_name}' table in the '{source_lakehouse_name}' lakehouse within the '{source_workspace_name}' workspace."
-            )
-        else:
-            print(response.status_code)
-    except Exception as e:
-        raise ValueError(
-            f"{icons.red_dot} Failed to create a shortcut for the '{table_name}' table."
-        ) from e
+    _base_api(
+        request=f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts",
+        payload=payload,
+        status_codes=201,
+        method="post",
+    )
+
+    print(
+        f"{icons.green_dot} The shortcut '{shortcut_name}' was created in the '{destination_lakehouse_name}' lakehouse within the '{destination_workspace_name} workspace. It is based on the '{table_name}' table in the '{source_lakehouse_name}' lakehouse within the '{source_workspace_name}' workspace."
+    )
 
 
 def create_shortcut(
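
For reference, a usage sketch for create_shortcut_onelake as refactored above. The keyword names are assumptions inferred from the variables in the hunk; the full signature is not shown in this diff, so verify against the package documentation:

    from sempy_labs.lakehouse import create_shortcut_onelake

    # Parameter names below are inferred, not confirmed by this diff.
    create_shortcut_onelake(
        table_name="sales",
        source_lakehouse="SourceLakehouse",
        source_workspace="Source Workspace",
        destination_lakehouse="DestLakehouse",
        destination_workspace="Dest Workspace",
        shortcut_name="sales",
    )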
@@ -223,3 +217,31 @@ def delete_shortcut(
     print(
         f"{icons.green_dot} The '{shortcut_name}' shortcut in the '{lakehouse}' within the '{workspace_name}' workspace has been deleted."
     )
+
+
+def reset_shortcut_cache(workspace: Optional[str | UUID]):
+    """
+    Deletes any cached files that were stored while reading from shortcuts.
+
+    This is a wrapper function for the following API: `OneLake Shortcuts - Reset Shortcut Cache <https://learn.microsoft.com/rest/api/fabric/core/onelake-shortcuts/reset-shortcut-cache>`_.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/onelake/resetShortcutCache",
+        method="post",
+        lro_return_status_code=True,
+        status_codes=None,
+    )
+
+    print(
+        f"{icons.green_dot} The shortcut cache has been reset for the '{workspace_name}' workspace."
+    )
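
A minimal usage sketch for the new reset_shortcut_cache; the workspace name is illustrative. Per the lro_return_status_code=True flag above, the request appears to be handled as a long-running operation:

    from sempy_labs.lakehouse import reset_shortcut_cache

    # Clears cached shortcut reads for every item in the workspace.
    reset_shortcut_cache(workspace="My Workspace")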

sempy_labs/migration/__init__.py

@@ -18,6 +18,9 @@ from sempy_labs.migration._migration_validation import (
 from sempy_labs.migration._refresh_calc_tables import (
     refresh_calc_tables,
 )
+from sempy_labs.migration._direct_lake_to_import import (
+    migrate_direct_lake_to_import,
+)
 
 __all__ = [
     "create_pqt_file",
@@ -28,4 +31,5 @@ __all__ = [
     "migrate_tables_columns_to_semantic_model",
     "migration_validation",
     "refresh_calc_tables",
+    "migrate_direct_lake_to_import",
 ]

sempy_labs/migration/_direct_lake_to_import.py (new file)

@@ -0,0 +1,66 @@
+import sempy
+from uuid import UUID
+import sempy_labs._icons as icons
+
+
+def migrate_direct_lake_to_import(dataset: str | UUID, workspace: str | UUID):
+    """
+    Migrates a semantic model from Direct Lake mode to import mode. After running this function, you must go to the semantic model settings and update the cloud connection. Not doing so will result in an inablity to refresh/use the semantic model.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    sempy.fabric._client._utils._init_analysis_services()
+    import Microsoft.AnalysisServices.Tabular as TOM
+    from sempy_labs.tom import connect_semantic_model
+
+    with connect_semantic_model(
+        dataset=dataset, workspace=workspace, readonly=False
+    ) as tom:
+
+        if not tom.is_direct_lake():
+            print(
+                f"{icons.warning} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
+            )
+            return
+
+        for t in tom.model.Tables:
+            table_name = t.Name
+            if t.Partitions.Count == 1 and all(
+                p.Mode == TOM.ModeType.DirectLake for p in t.Partitions
+            ):
+                p = next(p for p in t.Partitions)
+                partition_name = p.Name
+                entity_name = p.Source.EntityName
+                schema_name = p.Source.SchemaName or "dbo"
+                # Rename Direct Lake partition
+                t.Partitions[partition_name].Name = f"{partition_name}_remove"
+
+                # Generate M expression for import partition
+                expression = f"""let\n\tSource = DatabaseQuery,\n\tData = Source{{[Schema="{schema_name}",Item="{entity_name}"]}}[Data]\nin\n\tData"""
+
+                # Generate M partition
+                tom.add_m_partition(
+                    table_name=table_name,
+                    partition_name=partition_name,
+                    expression=expression,
+                    mode="Import",
+                )
+                # Remove Direct Lake partition
+                tom.remove_object(object=p)
+
+        tom.model.Model.DefaultMode = TOM.ModeType.Import
+
+        # Check
+        # for t in tom.model.Tables:
+        #     if t.Partitions.Count == 1 and all(p.Mode == TOM.ModeType.Import for p in t.Partitions) and t.CalculationGroup is None:
+        #         p = next(p for p in t.Partitions)
+        #         print(p.Name)
+        #         print(p.Source.Expression)
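
A usage sketch for the new migrate_direct_lake_to_import; the dataset and workspace names are illustrative. The docstring above warns that the model's cloud connection must be updated in the semantic model settings afterwards:

    from sempy_labs.migration import migrate_direct_lake_to_import

    migrate_direct_lake_to_import(dataset="Sales Model", workspace="My Workspace")
    # Afterwards, update the cloud connection in the semantic model settings;
    # otherwise the model cannot be refreshed or used.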

sempy_labs/migration/_migrate_calctables_to_lakehouse.py

@@ -283,6 +283,7 @@ def migrate_field_parameters(
         new_dataset_workspace = workspace
 
     icons.sll_tags.append("DirectLakeMigration")
+    fabric.refresh_tom_cache(workspace=workspace)
 
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfC["Column Object"] = format_dax_object_name(dfC["Table Name"], dfC["Column Name"])

sempy_labs/migration/_migrate_calctables_to_semantic_model.py

@@ -54,6 +54,7 @@ def migrate_calc_tables_to_semantic_model(
     )
 
     workspace = fabric.resolve_workspace_name(workspace)
+    fabric.refresh_tom_cache(workspace=workspace)
 
     if new_dataset_workspace is None:
         new_dataset_workspace = workspace

sempy_labs/migration/_migrate_model_objects_to_semantic_model.py

@@ -48,6 +48,7 @@ def migrate_model_objects_to_semantic_model(
     )
 
     workspace = fabric.resolve_workspace_name(workspace)
+    fabric.refresh_tom_cache(workspace=workspace)
 
     if new_dataset_workspace is None:
         new_dataset_workspace = workspace

sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py

@@ -75,6 +75,8 @@ def migrate_tables_columns_to_semantic_model(
         item_name=lakehouse, item_type="Lakehouse", workspace=lakehouse_workspace
     )
 
+    fabric.refresh_tom_cache(workspace=workspace)
+
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
     dfT.rename(columns={"Type": "Table Type"}, inplace=True)

sempy_labs/report/_download_report.py

@@ -4,14 +4,15 @@ from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_lakehouse_name,
+    _base_api,
+    resolve_item_id,
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID
 
 
 def download_report(
-    report: str,
+    report: str | UUID,
     file_name: Optional[str] = None,
     download_type: str = "LiveConnect",
     workspace: Optional[str | UUID] = None,
@@ -23,8 +24,8 @@
 
     Parameters
     ----------
-    report: str
-        Name of the report.
+    report: str | uuid.UUID
+        Name or ID of the report.
     file_name : str, default=None
         Name of the .pbix file to be saved.
         Defaults to None which resolves to the name of the report.
@@ -55,18 +56,12 @@
     )
 
     file_name = file_name or report
-    report_id = fabric.resolve_item_id(
-        item_name=report, type="Report", workspace=workspace_id
-    )
+    report_id = resolve_item_id(item=report, type="Report", workspace=workspace)
 
-    client = fabric.PowerBIRestClient()
-    response = client.get(
-        f"/v1.0/myorg/groups/{workspace_id}/reports/{report_id}/Export?downloadType={download_type}"
+    response = _base_api(
+        request=f"v1.0/myorg/groups/{workspace_id}/reports/{report_id}/Export?downloadType={download_type}"
     )
 
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
     # Save file to the attached lakehouse
     with open(f"/lakehouse/default/Files/{file_name}.pbix", "wb") as file:
         file.write(response.content)
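
A usage sketch for download_report with the widened report parameter, assuming the function is exported from sempy_labs.report as in prior releases; names are illustrative. Since the .pbix is written under /lakehouse/default/Files, a lakehouse must be attached to the notebook:

    from sempy_labs.report import download_report

    download_report(
        report="Sales Report",          # a report name or ID is now accepted
        download_type="LiveConnect",
        workspace="My Workspace",
    )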