semantic-link-labs 0.9.0__py3-none-any.whl → 0.9.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (83):
  1. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/METADATA +68 -7
  2. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/RECORD +83 -76
  3. sempy_labs/__init__.py +14 -12
  4. sempy_labs/_authentication.py +0 -2
  5. sempy_labs/_capacities.py +120 -142
  6. sempy_labs/_capacity_migration.py +61 -94
  7. sempy_labs/_clear_cache.py +9 -8
  8. sempy_labs/_connections.py +72 -105
  9. sempy_labs/_data_pipelines.py +47 -49
  10. sempy_labs/_dataflows.py +45 -51
  11. sempy_labs/_dax.py +228 -6
  12. sempy_labs/_delta_analyzer.py +303 -0
  13. sempy_labs/_deployment_pipelines.py +72 -66
  14. sempy_labs/_environments.py +39 -36
  15. sempy_labs/_eventhouses.py +35 -35
  16. sempy_labs/_eventstreams.py +38 -39
  17. sempy_labs/_external_data_shares.py +29 -42
  18. sempy_labs/_gateways.py +57 -101
  19. sempy_labs/_generate_semantic_model.py +22 -30
  20. sempy_labs/_git.py +46 -66
  21. sempy_labs/_graphQL.py +95 -0
  22. sempy_labs/_helper_functions.py +175 -30
  23. sempy_labs/_job_scheduler.py +47 -59
  24. sempy_labs/_kql_databases.py +27 -34
  25. sempy_labs/_kql_querysets.py +23 -30
  26. sempy_labs/_list_functions.py +262 -164
  27. sempy_labs/_managed_private_endpoints.py +52 -47
  28. sempy_labs/_mirrored_databases.py +110 -134
  29. sempy_labs/_mirrored_warehouses.py +13 -13
  30. sempy_labs/_ml_experiments.py +36 -36
  31. sempy_labs/_ml_models.py +37 -38
  32. sempy_labs/_model_dependencies.py +2 -0
  33. sempy_labs/_notebooks.py +28 -29
  34. sempy_labs/_one_lake_integration.py +2 -0
  35. sempy_labs/_query_scale_out.py +63 -81
  36. sempy_labs/_refresh_semantic_model.py +12 -14
  37. sempy_labs/_spark.py +54 -79
  38. sempy_labs/_sql.py +7 -11
  39. sempy_labs/_vertipaq.py +8 -3
  40. sempy_labs/_warehouses.py +30 -33
  41. sempy_labs/_workloads.py +15 -20
  42. sempy_labs/_workspace_identity.py +13 -17
  43. sempy_labs/_workspaces.py +49 -48
  44. sempy_labs/admin/__init__.py +2 -0
  45. sempy_labs/admin/_basic_functions.py +244 -281
  46. sempy_labs/admin/_domains.py +188 -103
  47. sempy_labs/admin/_external_data_share.py +26 -31
  48. sempy_labs/admin/_git.py +17 -22
  49. sempy_labs/admin/_items.py +34 -48
  50. sempy_labs/admin/_scanner.py +20 -13
  51. sempy_labs/directlake/_directlake_schema_compare.py +2 -0
  52. sempy_labs/directlake/_dl_helper.py +10 -11
  53. sempy_labs/directlake/_generate_shared_expression.py +4 -5
  54. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -0
  55. sempy_labs/directlake/_list_directlake_model_calc_tables.py +1 -0
  56. sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -0
  57. sempy_labs/directlake/_warm_cache.py +2 -0
  58. sempy_labs/graph/__init__.py +33 -0
  59. sempy_labs/graph/_groups.py +402 -0
  60. sempy_labs/graph/_teams.py +113 -0
  61. sempy_labs/graph/_users.py +191 -0
  62. sempy_labs/lakehouse/__init__.py +4 -0
  63. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -10
  64. sempy_labs/lakehouse/_get_lakehouse_tables.py +14 -20
  65. sempy_labs/lakehouse/_lakehouse.py +101 -4
  66. sempy_labs/lakehouse/_shortcuts.py +42 -20
  67. sempy_labs/migration/__init__.py +4 -0
  68. sempy_labs/migration/_direct_lake_to_import.py +66 -0
  69. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +1 -0
  70. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +1 -0
  71. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +1 -0
  72. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +2 -0
  73. sempy_labs/report/_download_report.py +8 -13
  74. sempy_labs/report/_generate_report.py +49 -46
  75. sempy_labs/report/_paginated.py +20 -26
  76. sempy_labs/report/_report_functions.py +50 -45
  77. sempy_labs/report/_report_list_functions.py +2 -0
  78. sempy_labs/report/_report_rebind.py +6 -10
  79. sempy_labs/report/_reportwrapper.py +187 -220
  80. sempy_labs/tom/_model.py +8 -5
  81. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/LICENSE +0 -0
  82. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/WHEEL +0 -0
  83. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/top_level.txt +0 -0
@@ -6,15 +6,15 @@ from typing import Optional
6
6
  from sempy_labs._helper_functions import (
7
7
  resolve_workspace_name_and_id,
8
8
  _conv_b64,
9
- resolve_report_id,
10
9
  resolve_dataset_name_and_id,
11
10
  resolve_item_name_and_id,
12
- lro,
11
+ _update_dataframe_datatypes,
12
+ _base_api,
13
+ resolve_item_id,
13
14
  )
14
15
  import sempy_labs._icons as icons
15
16
  from sempy._utils._log import log
16
17
  from uuid import UUID
17
- from sempy.fabric.exceptions import FabricHTTPException
18
18
 
19
19
 
20
20
  def create_report_from_reportjson(
@@ -57,7 +57,6 @@ def create_report_from_reportjson(
57
57
  )
58
58
  return
59
59
 
60
- client = fabric.FabricRestClient()
61
60
  defPBIR = {
62
61
  "version": "1.0",
63
62
  "datasetReference": {
@@ -106,9 +105,13 @@ def create_report_from_reportjson(
106
105
  }
107
106
  request_body["definition"]["parts"].append(part)
108
107
 
109
- response = client.post(f"/v1/workspaces/{workspace_id}/reports", json=request_body)
110
-
111
- lro(client, response, status_codes=[201, 202], return_status_code=True)
108
+ _base_api(
109
+ request=f"/v1/workspaces/{workspace_id}/reports",
110
+ method="post",
111
+ payload=request_body,
112
+ lro_return_status_code=True,
113
+ status_codes=[201, 202],
114
+ )
112
115
 
113
116
  print(
114
117
  f"{icons.green_dot} Succesfully created the '{report}' report within the '{workspace_name}' workspace."
@@ -116,7 +119,7 @@ def create_report_from_reportjson(
116
119
 
117
120
 
118
121
  def update_report_from_reportjson(
119
- report: str, report_json: dict, workspace: Optional[str | UUID] = None
122
+ report: str | UUID, report_json: dict, workspace: Optional[str | UUID] = None
120
123
  ):
121
124
  """
122
125
  Updates a report based on a report.json file.
@@ -125,8 +128,8 @@ def update_report_from_reportjson(
125
128
 
126
129
  Parameters
127
130
  ----------
128
- report : str
129
- Name of the report.
131
+ report : str | uuid.UUID
132
+ Name or ID of the report.
130
133
  report_json : dict
131
134
  The report.json file to be used to update the report.
132
135
  workspace : str | uuid.UUID, default=None
@@ -136,7 +139,7 @@ def update_report_from_reportjson(
136
139
  """
137
140
 
138
141
  (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
139
- report_id = resolve_report_id(report=report, workspace=workspace_id)
142
+ report_id = resolve_item_id(item=report, type="Report", workspace=workspace)
140
143
 
141
144
  # Get the existing PBIR file
142
145
  df_items = get_report_definition(report=report, workspace=workspace_id)
@@ -144,7 +147,7 @@ def update_report_from_reportjson(
144
147
  rptDefFile = df_items_filt["payload"].iloc[0]
145
148
  payloadReportJson = _conv_b64(report_json)
146
149
 
147
- request_body = {
150
+ payload = {
148
151
  "definition": {
149
152
  "parts": [
150
153
  {
@@ -161,14 +164,14 @@ def update_report_from_reportjson(
161
164
  }
162
165
  }
163
166
 
164
- client = fabric.FabricRestClient()
165
- response = client.post(
166
- f"/v1/workspaces/{workspace_id}/reports/{report_id}/updateDefinition",
167
- json=request_body,
167
+ _base_api(
168
+ request=f"/v1/workspaces/{workspace_id}/reports/{report_id}/updateDefinition",
169
+ method="post",
170
+ payload=payload,
171
+ lro_return_status_code=True,
172
+ status_codes=None,
168
173
  )
169
174
 
170
- lro(client, response, return_status_code=True)
171
-
172
175
  print(
173
176
  f"{icons.green_dot} The '{report}' report within the '{workspace_name}' workspace has been successfully updated."
174
177
  )
@@ -200,15 +203,15 @@ def get_report_definition(
200
203
  """
201
204
 
202
205
  (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
206
+ report_id = resolve_item_id(item=report, type="Report", workspace=workspace)
203
207
 
204
- report_id = resolve_report_id(report=report, workspace=workspace_id)
205
- client = fabric.FabricRestClient()
206
- response = client.post(
207
- f"/v1/workspaces/{workspace_id}/reports/{report_id}/getDefinition",
208
+ result = _base_api(
209
+ request=f"/v1/workspaces/{workspace_id}/reports/{report_id}/getDefinition",
210
+ method="post",
211
+ lro_return_json=True,
212
+ status_codes=None,
208
213
  )
209
214
 
210
- result = lro(client, response).json()
211
-
212
215
  if return_dataframe:
213
216
  return pd.json_normalize(result["definition"]["parts"])
214
217
  else:
@@ -238,8 +241,6 @@ def create_model_bpa_report(
238
241
  or if no lakehouse attached, resolves to the workspace of the notebook.
239
242
 
240
243
  """
241
- # from sempy_labs._helper_functions import resolve_dataset_id
242
-
243
244
  (dataset_workspace_name, dataset_workspace_id) = resolve_workspace_name_and_id(
244
245
  dataset_workspace
245
246
  )
@@ -334,28 +335,30 @@ def _create_report(
334
335
  dfR_filt = dfR[dfR["Name"] == report]
335
336
 
336
337
  updated_report = False
337
- client = fabric.FabricRestClient()
338
338
  # Create report if it does not exist
339
- if len(dfR_filt) == 0:
340
- response = client.post(
341
- f"/v1/workspaces/{report_workspace_id}/reports",
342
- json=request_body,
339
+ if dfR_filt.empty:
340
+ _base_api(
341
+ request=f"/v1/workspaces/{report_workspace_id}/reports",
342
+ method="post",
343
+ payload=request_body,
344
+ lro_return_status_code=True,
345
+ status_codes=[201, 202],
343
346
  )
344
347
 
345
- lro(client, response, status_codes=[201, 202], return_status_code=True)
346
-
347
348
  print(
348
349
  f"{icons.green_dot} The '{report}' report has been created within the '{report_workspace}'"
349
350
  )
350
351
  updated_report = True
351
352
  # Update the report if it exists
352
- elif len(dfR_filt) > 0 and overwrite:
353
+ elif not dfR_filt.empty and overwrite:
353
354
  report_id = dfR_filt["Id"].iloc[0]
354
- response = client.post(
355
- f"/v1/workspaces/{report_workspace_id}/reports/{report_id}/updateDefinition",
356
- json=request_body,
355
+ _base_api(
356
+ request=f"/v1/workspaces/{report_workspace_id}/reports/{report_id}/updateDefinition",
357
+ method="post",
358
+ payload=request_body,
359
+ lro_return_status_code=True,
360
+ status_codes=None,
357
361
  )
358
- lro(client, response, return_status_code=True)
359
362
  print(
360
363
  f"{icons.green_dot} The '{report}' report has been updated within the '{report_workspace}'"
361
364
  )
@@ -384,12 +387,9 @@ def _get_report(
384
387
  item=report, type="Report", workspace=workspace
385
388
  )
386
389
 
387
- client = fabric.FabricRestClient()
388
- response = client.get(f"/v1.0/myorg/groups/{workspace_id}/reports/{report_id}")
389
-
390
- if response.status_code != 200:
391
- raise FabricHTTPException(response)
392
-
390
+ response = _base_api(
391
+ request=f"v1.0/myorg/groups/{workspace_id}/reports/{report_id}"
392
+ )
393
393
  result = response.json()
394
394
 
395
395
  new_data = {
@@ -412,7 +412,10 @@ def _get_report(
412
412
 
413
413
  df = pd.DataFrame([new_data])
414
414
 
415
- bool_cols = ["Is From Pbix", "Is Owned By Me"]
416
- df[bool_cols] = df[bool_cols].astype(bool)
415
+ column_map = {
416
+ "Is From Pbix": "bool",
417
+ "Is Owned By Me": "bool",
418
+ }
419
+ _update_dataframe_datatypes(dataframe=df, column_map=column_map)
417
420
 
418
421
  return df
@@ -1,15 +1,16 @@
1
- import sempy.fabric as fabric
2
1
  from typing import Optional
3
- from sempy.fabric.exceptions import FabricHTTPException
4
2
  import pandas as pd
5
3
  from uuid import UUID
6
4
  from sempy_labs._helper_functions import (
7
5
  resolve_workspace_name_and_id,
6
+ _base_api,
7
+ resolve_item_id,
8
+ _create_dataframe,
8
9
  )
9
10
 
10
11
 
11
12
  def get_report_datasources(
12
- report: str,
13
+ report: str | UUID,
13
14
  workspace: Optional[str | UUID] = None,
14
15
  ) -> pd.DataFrame:
15
16
  """
@@ -17,8 +18,8 @@ def get_report_datasources(
17
18
 
18
19
  Parameters
19
20
  ----------
20
- report : str | List[str]
21
- Name(s) of the Power BI report(s).
21
+ report : str | uuid.UUID
22
+ Name or ID of the Power BI report.
22
23
  workspace : str | uuid.UUID, default=None
23
24
  The name or ID of the Fabric workspace in which the report resides.
24
25
  Defaults to None which resolves to the workspace of the attached lakehouse
@@ -30,33 +31,26 @@ def get_report_datasources(
30
31
  A pandas dataframe showing a list of data sources for the specified paginated report (RDL) from the specified workspace.
31
32
  """
32
33
 
33
- df = pd.DataFrame(
34
- columns=[
35
- "Report Name",
36
- "Report Id",
37
- "Datasource Id",
38
- "Datasource Type",
39
- "Gateway Id",
40
- "Server",
41
- "Database",
42
- ]
43
- )
34
+ columns = {
35
+ "Report Name": "str",
36
+ "Report Id": "str",
37
+ "Datasource Id": "str",
38
+ "Datasource Type": "str",
39
+ "Gateway Id": "str",
40
+ "Server": "str",
41
+ "Database": "str",
42
+ }
43
+ df = _create_dataframe(columns=columns)
44
44
 
45
45
  (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
46
-
47
- report_id = fabric.resolve_item_id(
48
- item_name=report, type="PaginatedReport", workspace=workspace_id
46
+ report_id = resolve_item_id(
47
+ item=report, type="PaginatedReport", workspace=workspace
49
48
  )
50
49
 
51
- client = fabric.PowerBIRestClient()
52
-
53
- response = client.get(
54
- f"/v1.0/myorg/groups/{workspace_id}/reports/{report_id}/datasources"
50
+ response = _base_api(
51
+ request=f"v1.0/myorg/groups/{workspace_id}/reports/{report_id}/datasources"
55
52
  )
56
53
 
57
- if response.status_code != 200:
58
- raise FabricHTTPException(response)
59
-
60
54
  for i in response.json().get("value", []):
61
55
  conn = i.get("connectionDetails", {})
62
56
  new_data = {
@@ -15,14 +15,14 @@ from sempy_labs._helper_functions import (
15
15
  resolve_lakehouse_name,
16
16
  language_validate,
17
17
  resolve_workspace_name_and_id,
18
- lro,
19
18
  _decode_b64,
20
19
  resolve_dataset_id,
20
+ _update_dataframe_datatypes,
21
+ _base_api,
21
22
  )
22
23
  from typing import List, Optional, Union
23
24
  from sempy._utils._log import log
24
25
  import sempy_labs._icons as icons
25
- from sempy.fabric.exceptions import FabricHTTPException
26
26
  from uuid import UUID
27
27
 
28
28
 
@@ -57,12 +57,12 @@ def get_report_json(
57
57
  report_id = resolve_report_id(report=report, workspace=workspace_id)
58
58
  fmt = "PBIR-Legacy"
59
59
 
60
- client = fabric.FabricRestClient()
61
- response = client.post(
62
- f"/v1/workspaces/{workspace_id}/reports/{report_id}/getDefinition?format={fmt}"
60
+ result = _base_api(
61
+ request=f"/v1/workspaces/{workspace_id}/reports/{report_id}/getDefinition?format={fmt}",
62
+ method="post",
63
+ lro_return_json=True,
64
+ status_codes=None,
63
65
  )
64
-
65
- result = lro(client, response).json()
66
66
  df_items = pd.json_normalize(result["definition"]["parts"])
67
67
  df_items_filt = df_items[df_items["path"] == "report.json"]
68
68
  payload = df_items_filt["payload"].iloc[0]
@@ -284,7 +284,6 @@ def export_report(
284
284
  )
285
285
 
286
286
  reportId = dfI_filt["Id"].iloc[0]
287
- client = fabric.PowerBIRestClient()
288
287
 
289
288
  if (
290
289
  export_format in ["BMP", "EMF", "GIF", "JPEG", "TIFF"]
@@ -362,31 +361,35 @@ def export_report(
362
361
  ]
363
362
 
364
363
  base_url = f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}"
365
- response = client.post(f"{base_url}/ExportTo", json=request_body)
366
-
367
- if response.status_code == 202:
368
- response_body = json.loads(response.content)
369
- export_id = response_body["id"]
370
- response = client.get(f"{base_url}/exports/{export_id}")
364
+ response = _base_api(
365
+ request=f"{base_url}/ExportTo",
366
+ method="post",
367
+ payload=request_body,
368
+ status_codes=202,
369
+ )
370
+ export_id = json.loads(response.content).get("id")
371
+
372
+ get_status_url = f"{base_url}/exports/{export_id}"
373
+ response = _base_api(request=get_status_url, status_codes=[200, 202])
374
+ response_body = json.loads(response.content)
375
+ while response_body["status"] not in ["Succeeded", "Failed"]:
376
+ time.sleep(3)
377
+ response = _base_api(request=get_status_url, status_codes=[200, 202])
371
378
  response_body = json.loads(response.content)
372
- while response_body["status"] not in ["Succeeded", "Failed"]:
373
- time.sleep(3)
374
- response = client.get(f"{base_url}/exports/{export_id}")
375
- response_body = json.loads(response.content)
376
- if response_body["status"] == "Failed":
377
- raise ValueError(
378
- f"{icons.red_dot} The export for the '{report}' report within the '{workspace_name}' workspace in the '{export_format}' format has failed."
379
- )
380
- else:
381
- response = client.get(f"{base_url}/exports/{export_id}/file")
382
- print(
383
- f"{icons.in_progress} Saving the '{export_format}' export for the '{report}' report within the '{workspace_name}' workspace to the lakehouse..."
384
- )
385
- with open(filePath, "wb") as export_file:
386
- export_file.write(response.content)
387
- print(
388
- f"{icons.green_dot} The '{export_format}' export for the '{report}' report within the '{workspace_name}' workspace has been saved to the following location: '{filePath}'."
389
- )
379
+ if response_body["status"] == "Failed":
380
+ raise ValueError(
381
+ f"{icons.red_dot} The export for the '{report}' report within the '{workspace_name}' workspace in the '{export_format}' format has failed."
382
+ )
383
+ else:
384
+ response = _base_api(request=f"{get_status_url}/file")
385
+ print(
386
+ f"{icons.in_progress} Saving the '{export_format}' export for the '{report}' report within the '{workspace_name}' workspace to the lakehouse..."
387
+ )
388
+ with open(filePath, "wb") as export_file:
389
+ export_file.write(response.content)
390
+ print(
391
+ f"{icons.green_dot} The '{export_format}' export for the '{report}' report within the '{workspace_name}' workspace has been saved to the following location: '{filePath}'."
392
+ )
390
393
 
391
394
 
392
395
  def clone_report(
@@ -452,20 +455,17 @@ def clone_report(
452
455
  f"{icons.warning} The 'report' and 'cloned_report' parameters have the same value of '{report}. The 'workspace' and 'target_workspace' have the same value of '{workspace_name}'. Either the 'cloned_report' or the 'target_workspace' must be different from the original report."
453
456
  )
454
457
 
455
- client = fabric.PowerBIRestClient()
456
-
457
- request_body = {"name": cloned_report}
458
+ payload = {"name": cloned_report}
458
459
  if target_dataset is not None:
459
- request_body["targetModelId"] = target_dataset_id
460
+ payload["targetModelId"] = target_dataset_id
460
461
  if target_workspace != workspace_name:
461
- request_body["targetWorkspaceId"] = target_workspace_id
462
+ payload["targetWorkspaceId"] = target_workspace_id
462
463
 
463
- response = client.post(
464
- f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/Clone", json=request_body
464
+ _base_api(
465
+ request=f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/Clone",
466
+ method="post",
467
+ payload=payload,
465
468
  )
466
-
467
- if response.status_code != 200:
468
- raise FabricHTTPException(response)
469
469
  print(
470
470
  f"{icons.green_dot} The '{report}' report has been successfully cloned as the '{cloned_report}' report within the '{target_workspace}' workspace."
471
471
  )
@@ -554,9 +554,14 @@ def list_report_pages(report: str, workspace: Optional[str | UUID] = None):
554
554
  }
555
555
  df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
556
556
 
557
- df["Hidden"] = df["Hidden"].astype(bool)
558
- intCol = ["Width", "Height", "Visual Count"]
559
- df[intCol] = df[intCol].astype(int)
557
+ column_map = {
558
+ "Hidden": "bool",
559
+ "Width": "int",
560
+ "Height": "int",
561
+ "Visual Count": "int",
562
+ }
563
+
564
+ _update_dataframe_datatypes(dataframe=df, column_map=column_map)
560
565
 
561
566
  return df
562
567
 
@@ -38,6 +38,8 @@ def list_unused_objects_in_reports(
38
38
  (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
39
39
  (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
40
40
 
41
+ fabric.refresh_tom_cache(workspace=workspace)
42
+
41
43
  dfR = _list_all_report_semantic_model_objects(
42
44
  dataset=dataset_id, workspace=workspace_id
43
45
  )
@@ -3,11 +3,11 @@ from sempy_labs._helper_functions import (
3
3
  resolve_dataset_id,
4
4
  resolve_workspace_name_and_id,
5
5
  resolve_report_id,
6
+ _base_api,
6
7
  )
7
8
  from typing import Optional, List
8
9
  from sempy._utils._log import log
9
10
  import sempy_labs._icons as icons
10
- from sempy.fabric.exceptions import FabricHTTPException
11
11
  from uuid import UUID
12
12
 
13
13
 
@@ -46,8 +46,6 @@ def report_rebind(
46
46
  if dataset_workspace is None:
47
47
  dataset_workspace = report_workspace_name
48
48
 
49
- client = fabric.PowerBIRestClient()
50
-
51
49
  if isinstance(report, str):
52
50
  report = [report]
53
51
 
@@ -55,16 +53,14 @@ def report_rebind(
55
53
  report_id = resolve_report_id(report=rpt, workspace=report_workspace_id)
56
54
  dataset_id = resolve_dataset_id(dataset=dataset, workspace=dataset_workspace)
57
55
 
58
- # Prepare API
59
- request_body = {"datasetId": dataset_id}
56
+ payload = {"datasetId": dataset_id}
60
57
 
61
- response = client.post(
62
- f"/v1.0/myorg/groups/{report_workspace_id}/reports/{report_id}/Rebind",
63
- json=request_body,
58
+ _base_api(
59
+ request=f"v1.0/myorg/groups/{report_workspace_id}/reports/{report_id}/Rebind",
60
+ method="post",
61
+ json=payload,
64
62
  )
65
63
 
66
- if response.status_code != 200:
67
- raise FabricHTTPException(response)
68
64
  print(
69
65
  f"{icons.green_dot} The '{rpt}' report has been successfully rebinded to the '{dataset}' semantic model."
70
66
  )