semantic-link-labs 0.8.2__py3-none-any.whl → 0.8.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (108) hide show
  1. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/METADATA +37 -8
  2. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/RECORD +108 -104
  3. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +38 -0
  5. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
  6. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
  7. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
  8. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
  9. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
  10. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
  11. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
  12. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
  13. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
  14. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
  15. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
  16. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
  17. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
  18. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
  19. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
  20. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
  21. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
  22. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
  23. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
  24. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
  25. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
  26. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
  27. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
  28. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
  29. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
  30. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
  31. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
  32. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
  33. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
  34. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
  35. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
  36. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
  37. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
  38. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
  39. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
  40. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
  41. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
  42. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
  43. sempy_labs/_capacities.py +138 -25
  44. sempy_labs/_capacity_migration.py +161 -60
  45. sempy_labs/_clear_cache.py +3 -3
  46. sempy_labs/_data_pipelines.py +54 -0
  47. sempy_labs/_dataflows.py +4 -0
  48. sempy_labs/_deployment_pipelines.py +13 -7
  49. sempy_labs/_environments.py +6 -0
  50. sempy_labs/_eventhouses.py +6 -0
  51. sempy_labs/_eventstreams.py +6 -0
  52. sempy_labs/_external_data_shares.py +190 -0
  53. sempy_labs/_generate_semantic_model.py +26 -4
  54. sempy_labs/_git.py +15 -15
  55. sempy_labs/_helper_functions.py +186 -11
  56. sempy_labs/_icons.py +55 -22
  57. sempy_labs/_kql_databases.py +6 -0
  58. sempy_labs/_kql_querysets.py +6 -0
  59. sempy_labs/_list_functions.py +6 -3
  60. sempy_labs/_managed_private_endpoints.py +166 -0
  61. sempy_labs/_mirrored_warehouses.py +2 -0
  62. sempy_labs/_ml_experiments.py +6 -0
  63. sempy_labs/_ml_models.py +6 -0
  64. sempy_labs/_model_bpa.py +11 -6
  65. sempy_labs/_model_bpa_bulk.py +14 -30
  66. sempy_labs/_model_bpa_rules.py +8 -3
  67. sempy_labs/_notebooks.py +111 -15
  68. sempy_labs/_query_scale_out.py +8 -6
  69. sempy_labs/_refresh_semantic_model.py +299 -49
  70. sempy_labs/_spark.py +12 -5
  71. sempy_labs/_sql.py +2 -2
  72. sempy_labs/_translations.py +16 -14
  73. sempy_labs/_vertipaq.py +127 -116
  74. sempy_labs/_warehouses.py +90 -1
  75. sempy_labs/_workloads.py +128 -0
  76. sempy_labs/_workspace_identity.py +4 -4
  77. sempy_labs/_workspaces.py +14 -1
  78. sempy_labs/admin/__init__.py +2 -0
  79. sempy_labs/admin/_basic_functions.py +203 -58
  80. sempy_labs/admin/_domains.py +18 -18
  81. sempy_labs/directlake/__init__.py +2 -0
  82. sempy_labs/directlake/_directlake_schema_sync.py +2 -6
  83. sempy_labs/directlake/_dl_helper.py +4 -1
  84. sempy_labs/directlake/_generate_shared_expression.py +1 -1
  85. sempy_labs/directlake/_get_shared_expression.py +7 -1
  86. sempy_labs/directlake/_guardrails.py +3 -2
  87. sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -8
  88. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
  89. sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
  90. sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
  91. sempy_labs/lakehouse/_shortcuts.py +4 -0
  92. sempy_labs/migration/_create_pqt_file.py +2 -2
  93. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
  94. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
  95. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
  96. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
  97. sempy_labs/migration/_migration_validation.py +2 -0
  98. sempy_labs/migration/_refresh_calc_tables.py +1 -0
  99. sempy_labs/report/__init__.py +4 -1
  100. sempy_labs/report/_generate_report.py +16 -14
  101. sempy_labs/report/_paginated.py +74 -0
  102. sempy_labs/report/_report_bpa.py +8 -10
  103. sempy_labs/report/_report_functions.py +19 -19
  104. sempy_labs/report/_report_rebind.py +6 -1
  105. sempy_labs/report/_reportwrapper.py +3 -3
  106. sempy_labs/tom/_model.py +173 -67
  107. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/LICENSE +0 -0
  108. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/top_level.txt +0 -0
@@ -26,6 +26,7 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
26
26
 
27
27
  spark = SparkSession.builder.getOrCreate()
28
28
  workspace = fabric.resolve_workspace_name(workspace)
29
+ icons.sll_tags.append("DirectLakeMigration")
29
30
 
30
31
  @retry(
31
32
  sleep_time=1,
@@ -1,7 +1,9 @@
1
1
  from sempy_labs.report._reportwrapper import (
2
2
  ReportWrapper,
3
3
  )
4
-
4
+ from sempy_labs.report._paginated import (
5
+ get_report_datasources,
6
+ )
5
7
  from sempy_labs.report._generate_report import (
6
8
  create_report_from_reportjson,
7
9
  get_report_definition,
@@ -39,4 +41,5 @@ __all__ = [
39
41
  "ReportWrapper",
40
42
  "report_bpa_rules",
41
43
  "run_report_bpa",
44
+ "get_report_datasources",
42
45
  ]
@@ -2,7 +2,6 @@ import sempy.fabric as fabric
2
2
  import pandas as pd
3
3
  import json
4
4
  import os
5
- import time
6
5
  from typing import Optional
7
6
  from sempy_labs._helper_functions import (
8
7
  resolve_workspace_name_and_id,
@@ -12,7 +11,6 @@ from sempy_labs._helper_functions import (
12
11
  )
13
12
  import sempy_labs._icons as icons
14
13
  from sempy._utils._log import log
15
- from sempy.fabric.exceptions import FabricHTTPException
16
14
 
17
15
 
18
16
  def create_report_from_reportjson(
@@ -25,6 +23,8 @@ def create_report_from_reportjson(
25
23
  """
26
24
  Creates a report based on a report.json file (and an optional themes.json file).
27
25
 
26
+ This is a wrapper function for the following API: `Items - Create Report <https://learn.microsoft.com/rest/api/fabric/report/items/create-report>`_.
27
+
28
28
  Parameters
29
29
  ----------
30
30
  report : str
@@ -113,7 +113,7 @@ def create_report_from_reportjson(
113
113
 
114
114
  response = client.post(f"/v1/workspaces/{workspace_id}/reports", json=request_body)
115
115
 
116
- lro(client, response, status_codes=[201, 202])
116
+ lro(client, response, status_codes=[201, 202], return_status_code=True)
117
117
 
118
118
  print(
119
119
  f"{icons.green_dot} Successfully created the '{report}' report within the '{workspace}' workspace."
@@ -126,6 +126,8 @@ def update_report_from_reportjson(
126
126
  """
127
127
  Updates a report based on a report.json file.
128
128
 
129
+ This is a wrapper function for the following API: `Items - Update Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/update-report-definition>`_.
130
+
129
131
  Parameters
130
132
  ----------
131
133
  report : str
@@ -181,6 +183,8 @@ def get_report_definition(report: str, workspace: Optional[str] = None) -> pd.Da
181
183
  """
182
184
  Gets the collection of definition files of a report.
183
185
 
186
+ This is a wrapper function for the following API: `Items - Get Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/get-report-definition>`_.
187
+
184
188
  Parameters
185
189
  ----------
186
190
  report : str
@@ -313,50 +317,48 @@ def _create_report(
313
317
  dataset: str,
314
318
  dataset_workspace: Optional[str] = None,
315
319
  report_workspace: Optional[str] = None,
316
- update_if_exists: bool = False,
320
+ overwrite: bool = False,
317
321
  ):
318
322
 
319
323
  from sempy_labs.report import report_rebind
320
324
 
321
325
  report_workspace = fabric.resolve_workspace_name(report_workspace)
322
326
  report_workspace_id = fabric.resolve_workspace_id(report_workspace)
323
- client = fabric.FabricRestClient()
327
+ dataset_workspace = fabric.resolve_workspace_name(dataset_workspace)
324
328
 
325
329
  dfR = fabric.list_reports(workspace=report_workspace)
326
330
  dfR_filt = dfR[dfR["Name"] == report]
327
331
 
328
332
  updated_report = False
329
-
333
+ client = fabric.FabricRestClient()
330
334
  # Create report if it does not exist
331
335
  if len(dfR_filt) == 0:
332
336
  response = client.post(
333
337
  f"/v1/workspaces/{report_workspace_id}/reports",
334
338
  json=request_body,
335
- lro_wait=True,
336
339
  )
337
- if response.status_code not in [200, 201]:
338
- raise FabricHTTPException(response)
340
+
341
+ lro(client, response, status_codes=[201, 202], return_status_code=True)
342
+
339
343
  print(
340
344
  f"{icons.green_dot} The '{report}' report has been created within the '{report_workspace}'"
341
345
  )
342
346
  updated_report = True
343
347
  # Update the report if it exists
344
- elif len(dfR_filt) > 0 and update_if_exists:
348
+ elif len(dfR_filt) > 0 and overwrite:
345
349
  report_id = dfR_filt["Id"].iloc[0]
346
350
  response = client.post(
347
351
  f"/v1/workspaces/{report_workspace_id}/reports/{report_id}/updateDefinition",
348
352
  json=request_body,
349
- lro_wait=True,
350
353
  )
351
- if response.status_code not in [200, 201]:
352
- raise FabricHTTPException(response)
354
+ lro(client, response, return_status_code=True)
353
355
  print(
354
356
  f"{icons.green_dot} The '{report}' report has been updated within the '{report_workspace}'"
355
357
  )
356
358
  updated_report = True
357
359
  else:
358
360
  raise ValueError(
359
- f"{icons.red_dot} The '{report}' report within the '{report_workspace}' workspace already exists and it was selected not to update it if the report already exists."
361
+ f"{icons.red_dot} The '{report}' report within the '{report_workspace}' workspace already exists and the 'overwrite' parameter was set to False."
360
362
  )
361
363
 
362
364
  # Rebind the report to the semantic model to make sure it is pointed at the correct semantic model
@@ -0,0 +1,74 @@
1
+ import sempy.fabric as fabric
2
+ from typing import Optional
3
+ from sempy.fabric.exceptions import FabricHTTPException
4
+ import pandas as pd
5
+
6
+
7
+ def get_report_datasources(
8
+ report: str,
9
+ workspace: Optional[str] = None,
10
+ ) -> pd.DataFrame:
11
+ """
12
+ Returns a list of data sources for the specified paginated report (RDL) from the specified workspace.
13
+
14
+ Parameters
15
+ ----------
16
+ report : str
17
+ Name of the Power BI paginated report.
18
+ workspace : str, default=None
19
+ The name of the Fabric workspace in which the report resides.
20
+ Defaults to None which resolves to the workspace of the attached lakehouse
21
+ or if no lakehouse attached, resolves to the workspace of the notebook.
22
+
23
+ Returns
24
+ -------
25
+ pandas.DataFrame
26
+ A pandas dataframe showing a list of data sources for the specified paginated report (RDL) from the specified workspace.
27
+ """
28
+
29
+ df = pd.DataFrame(
30
+ columns=[
31
+ "Report Name",
32
+ "Report Id",
33
+ "Datasource Id",
34
+ "Datasource Type",
35
+ "Gateway Id",
36
+ "Server",
37
+ "Database",
38
+ ]
39
+ )
40
+
41
+ if workspace is None:
42
+ workspace_id = fabric.get_workspace_id()
43
+ workspace = fabric.resolve_workspace_name(workspace_id)
44
+ else:
45
+ workspace_id = fabric.resolve_workspace_id(workspace)
46
+
47
+ report_id = fabric.resolve_item_id(
48
+ item_name=report, type="PaginatedReport", workspace=workspace
49
+ )
50
+
51
+ client = fabric.PowerBIRestClient()
52
+
53
+ response = client.get(
54
+ f"/v1.0/myorg/groups/{workspace_id}/reports/{report_id}/datasources"
55
+ )
56
+
57
+ if response.status_code != 200:
58
+ raise FabricHTTPException(response)
59
+
60
+ for i in response.json().get("value", []):
61
+ conn = i.get("connectionDetails", {})
62
+ new_data = {
63
+ "Report Name": report,
64
+ "Report Id": report_id,
65
+ "Datasource Id": i.get("datasourceId"),
66
+ "Datasource Type": i.get("datasourceType"),
67
+ "Gateway Id": i.get("gatewayId"),
68
+ "Server": conn.get("server") if conn else None,
69
+ "Database": conn.get("database") if conn else None,
70
+ }
71
+
72
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
73
+
74
+ return df
@@ -10,6 +10,7 @@ from sempy_labs._helper_functions import (
10
10
  resolve_report_id,
11
11
  resolve_lakehouse_name,
12
12
  resolve_workspace_capacity,
13
+ _get_max_run_id,
13
14
  )
14
15
  from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
15
16
  import sempy_labs._icons as icons
@@ -192,8 +193,6 @@ def run_report_bpa(
192
193
  return finalDF
193
194
 
194
195
  if export:
195
- from pyspark.sql import SparkSession
196
-
197
196
  if not lakehouse_attached():
198
197
  raise ValueError(
199
198
  f"{icons.red_dot} In order to export the BPA results, a lakehouse must be attached to the notebook."
@@ -202,7 +201,7 @@ def run_report_bpa(
202
201
  now = datetime.datetime.now()
203
202
  delta_table_name = "reportbparesults"
204
203
  lakehouse_id = fabric.get_lakehouse_id()
205
- lake_workspace = fabric.get_workspace_id()
204
+ lake_workspace = fabric.resolve_workspace_name()
206
205
  lakehouse = resolve_lakehouse_name(
207
206
  lakehouse_id=lakehouse_id, workspace=lake_workspace
208
207
  )
@@ -210,15 +209,13 @@ def run_report_bpa(
210
209
  lakeT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lake_workspace)
211
210
  lakeT_filt = lakeT[lakeT["Table Name"] == delta_table_name]
212
211
 
213
- spark = SparkSession.builder.getOrCreate()
214
- query = f"SELECT MAX(RunId) FROM {lakehouse}.{delta_table_name}"
215
-
216
212
  if len(lakeT_filt) == 0:
217
213
  runId = 1
218
214
  else:
219
- dfSpark = spark.sql(query)
220
- maxRunId = dfSpark.collect()[0][0]
221
- runId = maxRunId + 1
215
+ max_run_id = _get_max_run_id(
216
+ lakehouse=lakehouse, table_name=delta_table_name
217
+ )
218
+ runId = max_run_id + 1
222
219
 
223
220
  export_df = finalDF.copy()
224
221
  capacity_id, capacity_name = resolve_workspace_capacity(workspace=workspace)
@@ -232,7 +229,7 @@ def run_report_bpa(
232
229
  export_df["Timestamp"] = now
233
230
  export_df["RunId"] = export_df["RunId"].astype(int)
234
231
 
235
- export_df = [
232
+ export_df = export_df[
236
233
  [
237
234
  "Capacity Name",
238
235
  "Capacity Id",
@@ -249,6 +246,7 @@ def run_report_bpa(
249
246
  "URL",
250
247
  ]
251
248
  ]
249
+
252
250
  save_as_delta_table(
253
251
  dataframe=export_df,
254
252
  delta_table_name=delta_table_name,
@@ -33,6 +33,8 @@ def get_report_json(
33
33
  """
34
34
  Gets the report.json file content of a Power BI report.
35
35
 
36
+ This is a wrapper function for the following API: `Items - Get Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/get-report-definition>`_.
37
+
36
38
  Parameters
37
39
  ----------
38
40
  report : str
@@ -157,6 +159,8 @@ def export_report(
157
159
  """
158
160
  Exports a Power BI report to a file in your lakehouse.
159
161
 
162
+ This is a wrapper function for the following APIs: `Reports - Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group>`_, `Reports - Get Export To File Status In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-export-to-file-status-in-group>`_, `Reports - Get File Of Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-file-of-export-to-file-in-group>`_.
163
+
160
164
  Parameters
161
165
  ----------
162
166
  report : str
@@ -181,6 +185,8 @@ def export_report(
181
185
  """
182
186
 
183
187
  # https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group
188
+ # https://learn.microsoft.com/rest/api/power-bi/reports/get-export-to-file-status-in-group
189
+ # https://learn.microsoft.com/rest/api/power-bi/reports/get-file-of-export-to-file-in-group
184
190
 
185
191
  if not lakehouse_attached():
186
192
  raise ValueError(
@@ -222,7 +228,6 @@ def export_report(
222
228
  }
223
229
 
224
230
  export_format = export_format.upper()
225
-
226
231
  fileExt = validFormats.get(export_format)
227
232
  if fileExt is None:
228
233
  raise ValueError(
@@ -286,9 +291,6 @@ def export_report(
286
291
  reportId = dfI_filt["Id"].iloc[0]
287
292
  client = fabric.PowerBIRestClient()
288
293
 
289
- dfVisual = list_report_visuals(report=report, workspace=workspace)
290
- dfPage = list_report_pages(report=report, workspace=workspace)
291
-
292
294
  if (
293
295
  export_format in ["BMP", "EMF", "GIF", "JPEG", "TIFF"]
294
296
  and reportType == "PaginatedReport"
@@ -314,6 +316,7 @@ def export_report(
314
316
  request_body = {"format": export_format, "powerBIReportConfiguration": {}}
315
317
 
316
318
  request_body["powerBIReportConfiguration"]["pages"] = []
319
+ dfPage = list_report_pages(report=report, workspace=workspace)
317
320
 
318
321
  for page in page_name:
319
322
  dfPage_filt = dfPage[dfPage["Page ID"] == page]
@@ -335,9 +338,11 @@ def export_report(
335
338
  request_body = {"format": export_format, "powerBIReportConfiguration": {}}
336
339
 
337
340
  request_body["powerBIReportConfiguration"]["pages"] = []
341
+ dfVisual = list_report_visuals(report=report, workspace=workspace)
338
342
  a = 0
339
343
  for page in page_name:
340
344
  visual = visual_name[a]
345
+
341
346
  dfVisual_filt = dfVisual[
342
347
  (dfVisual["Page ID"] == page) & (dfVisual["Visual ID"] == visual)
343
348
  ]
@@ -360,32 +365,25 @@ def export_report(
360
365
  request_body["powerBIReportConfiguration"]["reportLevelFilters"] = [
361
366
  report_level_filter
362
367
  ]
363
- print(request_body)
364
- response = client.post(
365
- f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/ExportTo",
366
- json=request_body,
367
- )
368
+
369
+ base_url = f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}"
370
+ response = client.post(f"{base_url}/ExportTo", json=request_body)
371
+
368
372
  if response.status_code == 202:
369
373
  response_body = json.loads(response.content)
370
- exportId = response_body["id"]
371
- response = client.get(
372
- f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}"
373
- )
374
+ export_id = response_body["id"]
375
+ response = client.get(f"{base_url}/exports/{export_id}")
374
376
  response_body = json.loads(response.content)
375
377
  while response_body["status"] not in ["Succeeded", "Failed"]:
376
378
  time.sleep(3)
377
- response = client.get(
378
- f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}"
379
- )
379
+ response = client.get(f"{base_url}/exports/{export_id}")
380
380
  response_body = json.loads(response.content)
381
381
  if response_body["status"] == "Failed":
382
382
  raise ValueError(
383
383
  f"{icons.red_dot} The export for the '{report}' report within the '{workspace}' workspace in the '{export_format}' format has failed."
384
384
  )
385
385
  else:
386
- response = client.get(
387
- f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}/file"
388
- )
386
+ response = client.get(f"{base_url}/exports/{export_id}/file")
389
387
  print(
390
388
  f"{icons.in_progress} Saving the '{export_format}' export for the '{report}' report within the '{workspace}' workspace to the lakehouse..."
391
389
  )
@@ -407,6 +405,8 @@ def clone_report(
407
405
  """
408
406
  Clones a Power BI report.
409
407
 
408
+ This is a wrapper function for the following API: `Reports - Clone Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/clone-report-in-group>`_.
409
+
410
410
  Parameters
411
411
  ----------
412
412
  report : str
@@ -1,5 +1,8 @@
1
1
  import sempy.fabric as fabric
2
- from sempy_labs._helper_functions import resolve_dataset_id, resolve_report_id
2
+ from sempy_labs._helper_functions import (
3
+ resolve_dataset_id,
4
+ resolve_report_id,
5
+ )
3
6
  from typing import Optional, List
4
7
  from sempy._utils._log import log
5
8
  import sempy_labs._icons as icons
@@ -16,6 +19,8 @@ def report_rebind(
16
19
  """
17
20
  Rebinds a report to a semantic model.
18
21
 
22
+ This is a wrapper function for the following API: `Reports - Rebind Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/rebind-report-in-group>`_.
23
+
19
24
  Parameters
20
25
  ----------
21
26
  report : str | List[str]
@@ -7,7 +7,7 @@ from sempy_labs._helper_functions import (
7
7
  _extract_json,
8
8
  _add_part,
9
9
  lro,
10
- make_clickable,
10
+ # _make_clickable,
11
11
  )
12
12
  from typing import Optional, List
13
13
  import pandas as pd
@@ -370,7 +370,7 @@ class ReportWrapper:
370
370
  df = self._add_extended(dataframe=df)
371
371
 
372
372
  return df
373
- # return df.style.format({"Page URL": make_clickable})
373
+ # return df.style.format({"Page URL": _make_clickable})
374
374
 
375
375
  def list_visual_filters(self, extended: bool = False) -> pd.DataFrame:
376
376
  """
@@ -642,7 +642,7 @@ class ReportWrapper:
642
642
  )
643
643
 
644
644
  return df
645
- # return df.style.format({"Page URL": make_clickable})
645
+ # return df.style.format({"Page URL": _make_clickable})
646
646
 
647
647
  def list_visuals(self) -> pd.DataFrame:
648
648
  """