semantic-link-labs 0.8.10-py3-none-any.whl → 0.9.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (81)
  1. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/METADATA +6 -5
  2. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/RECORD +81 -80
  3. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +34 -3
  5. sempy_labs/_authentication.py +80 -4
  6. sempy_labs/_capacities.py +770 -200
  7. sempy_labs/_capacity_migration.py +7 -37
  8. sempy_labs/_clear_cache.py +37 -35
  9. sempy_labs/_connections.py +13 -13
  10. sempy_labs/_data_pipelines.py +20 -20
  11. sempy_labs/_dataflows.py +27 -28
  12. sempy_labs/_dax.py +41 -47
  13. sempy_labs/_deployment_pipelines.py +1 -1
  14. sempy_labs/_environments.py +26 -23
  15. sempy_labs/_eventhouses.py +16 -15
  16. sempy_labs/_eventstreams.py +16 -15
  17. sempy_labs/_external_data_shares.py +18 -20
  18. sempy_labs/_gateways.py +16 -14
  19. sempy_labs/_generate_semantic_model.py +107 -62
  20. sempy_labs/_git.py +105 -43
  21. sempy_labs/_helper_functions.py +251 -194
  22. sempy_labs/_job_scheduler.py +227 -0
  23. sempy_labs/_kql_databases.py +16 -15
  24. sempy_labs/_kql_querysets.py +16 -15
  25. sempy_labs/_list_functions.py +150 -126
  26. sempy_labs/_managed_private_endpoints.py +19 -17
  27. sempy_labs/_mirrored_databases.py +51 -48
  28. sempy_labs/_mirrored_warehouses.py +5 -4
  29. sempy_labs/_ml_experiments.py +16 -15
  30. sempy_labs/_ml_models.py +15 -14
  31. sempy_labs/_model_bpa.py +210 -207
  32. sempy_labs/_model_bpa_bulk.py +2 -2
  33. sempy_labs/_model_bpa_rules.py +3 -3
  34. sempy_labs/_model_dependencies.py +55 -29
  35. sempy_labs/_notebooks.py +29 -25
  36. sempy_labs/_one_lake_integration.py +23 -26
  37. sempy_labs/_query_scale_out.py +75 -64
  38. sempy_labs/_refresh_semantic_model.py +25 -26
  39. sempy_labs/_spark.py +33 -32
  40. sempy_labs/_sql.py +19 -12
  41. sempy_labs/_translations.py +10 -7
  42. sempy_labs/_vertipaq.py +38 -33
  43. sempy_labs/_warehouses.py +26 -25
  44. sempy_labs/_workspace_identity.py +11 -10
  45. sempy_labs/_workspaces.py +40 -33
  46. sempy_labs/admin/_basic_functions.py +166 -115
  47. sempy_labs/admin/_domains.py +7 -2
  48. sempy_labs/admin/_external_data_share.py +3 -3
  49. sempy_labs/admin/_git.py +4 -1
  50. sempy_labs/admin/_items.py +11 -6
  51. sempy_labs/admin/_scanner.py +10 -5
  52. sempy_labs/directlake/_directlake_schema_compare.py +25 -16
  53. sempy_labs/directlake/_directlake_schema_sync.py +24 -12
  54. sempy_labs/directlake/_dl_helper.py +74 -55
  55. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  56. sempy_labs/directlake/_get_directlake_lakehouse.py +32 -36
  57. sempy_labs/directlake/_get_shared_expression.py +4 -3
  58. sempy_labs/directlake/_guardrails.py +12 -6
  59. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  60. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  61. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  62. sempy_labs/directlake/_update_directlake_partition_entity.py +39 -31
  63. sempy_labs/directlake/_warm_cache.py +87 -65
  64. sempy_labs/lakehouse/_get_lakehouse_columns.py +23 -26
  65. sempy_labs/lakehouse/_get_lakehouse_tables.py +27 -38
  66. sempy_labs/lakehouse/_lakehouse.py +7 -20
  67. sempy_labs/lakehouse/_shortcuts.py +42 -23
  68. sempy_labs/migration/_create_pqt_file.py +16 -11
  69. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  70. sempy_labs/report/_download_report.py +9 -8
  71. sempy_labs/report/_generate_report.py +85 -44
  72. sempy_labs/report/_paginated.py +9 -9
  73. sempy_labs/report/_report_bpa.py +15 -11
  74. sempy_labs/report/_report_functions.py +80 -91
  75. sempy_labs/report/_report_helper.py +8 -4
  76. sempy_labs/report/_report_list_functions.py +24 -13
  77. sempy_labs/report/_report_rebind.py +17 -16
  78. sempy_labs/report/_reportwrapper.py +41 -33
  79. sempy_labs/tom/_model.py +139 -21
  80. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/LICENSE +0 -0
  81. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/top_level.txt +0 -0
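
The dominant pattern across these 81 files is a widening of workspace, dataset, and item parameters from str to str | UUID, with call sites switching from one-off fabric.resolve_workspace_name / fabric.resolve_workspace_id calls to paired helpers such as resolve_workspace_name_and_id and resolve_dataset_name_and_id that return both the display name and the ID at once. The sketch below is illustrative only (it is not the library's actual helper; the function and parameter names are invented for the example) and shows the name-or-ID dispatch such a resolver needs:

# Illustrative sketch of a name-or-ID resolver in the style of
# resolve_workspace_name_and_id; NOT the library's actual implementation.
# Requires Python 3.10+ for the "str | UUID" union syntax the package uses.
from typing import Callable, Optional, Tuple
from uuid import UUID


def _parses_as_uuid(value: str) -> bool:
    # Treat a string as an ID only if it is a well-formed UUID.
    try:
        UUID(str(value))
        return True
    except ValueError:
        return False


def resolve_name_and_id(
    workspace: Optional[str | UUID],
    current: Tuple[str, str],            # (name, id) of the attached workspace
    name_for_id: Callable[[str], str],   # lookup: workspace id -> display name
    id_for_name: Callable[[str], str],   # lookup: display name -> workspace id
) -> Tuple[str, str]:
    if workspace is None:
        return current  # default to the attached lakehouse's / notebook's workspace
    if isinstance(workspace, UUID) or _parses_as_uuid(workspace):
        return name_for_id(str(workspace)), str(workspace)
    return str(workspace), id_for_name(str(workspace))
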
sempy_labs/lakehouse/_shortcuts.py

@@ -7,14 +7,15 @@ from sempy_labs._helper_functions import (
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
 def create_shortcut_onelake(
     table_name: str,
     source_lakehouse: str,
-    source_workspace: str,
+    source_workspace: str | UUID,
     destination_lakehouse: str,
-    destination_workspace: Optional[str] = None,
+    destination_workspace: Optional[str | UUID] = None,
     shortcut_name: Optional[str] = None,
 ):
     """
@@ -28,28 +29,44 @@ def create_shortcut_onelake(
         The table name for which a shortcut will be created.
     source_lakehouse : str
         The Fabric lakehouse in which the table resides.
-    source_workspace : str
-        The name of the Fabric workspace in which the source lakehouse exists.
+    source_workspace : str | uuid.UUID
+        The name or ID of the Fabric workspace in which the source lakehouse exists.
     destination_lakehouse : str
         The Fabric lakehouse in which the shortcut will be created.
-    destination_workspace : str, default=None
-        The name of the Fabric workspace in which the shortcut will be created.
+    destination_workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace in which the shortcut will be created.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     shortcut_name : str, default=None
         The name of the shortcut 'table' to be created. This defaults to the 'table_name' parameter value.
     """
 
-    sourceWorkspaceId = fabric.resolve_workspace_id(source_workspace)
-    sourceLakehouseId = resolve_lakehouse_id(source_lakehouse, source_workspace)
+    (source_workspace_name, source_workspace_id) = resolve_workspace_name_and_id(
+        source_workspace
+    )
+    source_lakehouse_id = resolve_lakehouse_id(source_lakehouse, source_workspace_id)
+    source_lakehouse_name = fabric.resolve_item_name(
+        item_id=source_lakehouse_id, type="Lakehouse", workspace=source_workspace_id
+    )
 
     if destination_workspace is None:
-        destination_workspace = source_workspace
+        destination_workspace_name = source_workspace_name
+        destination_workspace_id = source_workspace_id
+    else:
+        destination_workspace_name = destination_workspace
+        destination_workspace_id = fabric.resolve_workspace_id(
+            destination_workspace_name
+        )
 
-    destinationWorkspaceId = fabric.resolve_workspace_id(destination_workspace)
-    destinationLakehouseId = resolve_lakehouse_id(
+    destination_workspace_id = fabric.resolve_workspace_id(destination_workspace)
+    destination_lakehouse_id = resolve_lakehouse_id(
         destination_lakehouse, destination_workspace
     )
+    destination_lakehouse_name = fabric.resolve_item_name(
+        item_id=destination_lakehouse_id,
+        type="Lakehouse",
+        workspace=destination_workspace_id,
+    )
 
     if shortcut_name is None:
         shortcut_name = table_name
@@ -62,8 +79,8 @@ def create_shortcut_onelake(
         "name": shortcut_name.replace(" ", ""),
         "target": {
             "oneLake": {
-                "workspaceId": sourceWorkspaceId,
-                "itemId": sourceLakehouseId,
+                "workspaceId": source_workspace_id,
+                "itemId": source_lakehouse_id,
                 "path": tablePath,
             }
        },
@@ -71,13 +88,13 @@ def create_shortcut_onelake(
 
     try:
         response = client.post(
-            f"/v1/workspaces/{destinationWorkspaceId}/items/{destinationLakehouseId}/shortcuts",
+            f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts",
            json=request_body,
        )
        if response.status_code == 201:
            print(
-                f"{icons.green_dot} The shortcut '{shortcut_name}' was created in the '{destination_lakehouse}' lakehouse within"
-                f" the '{destination_workspace} workspace. It is based on the '{table_name}' table in the '{source_lakehouse}' lakehouse within the '{source_workspace}' workspace."
+                f"{icons.green_dot} The shortcut '{shortcut_name}' was created in the '{destination_lakehouse_name}' lakehouse within"
+                f" the '{destination_workspace_name} workspace. It is based on the '{table_name}' table in the '{source_lakehouse_name}' lakehouse within the '{source_workspace_name}' workspace."
            )
        else:
            print(response.status_code)
@@ -166,7 +183,9 @@ def create_shortcut(
 
 
 def delete_shortcut(
-    shortcut_name: str, lakehouse: Optional[str] = None, workspace: Optional[str] = None
+    shortcut_name: str,
+    lakehouse: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Deletes a shortcut.
@@ -180,19 +199,19 @@ def delete_shortcut(
     lakehouse : str, default=None
         The Fabric lakehouse name in which the shortcut resides.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-    workspace : str, default=None
-        The name of the Fabric workspace in which lakehouse resides.
+    workspace : str | UUID, default=None
+        The name or ID of the Fabric workspace in which lakehouse resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
+        lakehouse = resolve_lakehouse_name(lakehouse_id, workspace_id)
     else:
-        lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
+        lakehouse_id = resolve_lakehouse_id(lakehouse, workspace_id)
 
     client = fabric.FabricRestClient()
     response = client.delete(
@@ -202,5 +221,5 @@ def delete_shortcut(
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{shortcut_name}' shortcut in the '{lakehouse}' within the '{workspace}' workspace has been deleted."
+        f"{icons.green_dot} The '{shortcut_name}' shortcut in the '{lakehouse}' within the '{workspace_name}' workspace has been deleted."
    )
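
Net effect of the _shortcuts.py changes: both functions accept a workspace ID wherever a workspace name was accepted before, and the success messages print resolved display names instead of the raw arguments. A minimal usage sketch, assuming a Fabric notebook with the 0.9.0 package installed and that sempy_labs.lakehouse re-exports these functions as in prior releases; every name and the UUID below is a placeholder:

# Usage sketch for the widened 0.9.0 signatures; all names and the UUID are placeholders.
from uuid import UUID
from sempy_labs.lakehouse import create_shortcut_onelake, delete_shortcut

source_ws = UUID("00000000-0000-0000-0000-000000000000")  # IDs are accepted in 0.9.0

create_shortcut_onelake(
    table_name="DimDate",
    source_lakehouse="SalesLakehouse",
    source_workspace=source_ws,         # 0.8.10 accepted only the workspace name here
    destination_lakehouse="ReportingLakehouse",
    destination_workspace="Reporting",  # names still work
)

delete_shortcut(
    shortcut_name="DimDate",
    lakehouse="ReportingLakehouse",
    workspace="Reporting",
)
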
sempy_labs/migration/_create_pqt_file.py

@@ -1,5 +1,4 @@
 import sempy
-import sempy.fabric as fabric
 import json
 import os
 import shutil
@@ -7,12 +6,17 @@ from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from sempy._utils._log import log
 from typing import Optional
 import sempy_labs._icons as icons
+from uuid import UUID
+from sempy_labs._helper_functions import (
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
+)
 
 
 @log
 def create_pqt_file(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     file_name: str = "PowerQueryTemplate",
 ):
     """
@@ -24,10 +28,10 @@ def create_pqt_file(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     file_name : str, default='PowerQueryTemplate'
@@ -43,19 +47,20 @@ def create_pqt_file(
            f"{icons.red_dot} In order to run the 'create_pqt_file' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
        )
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     folderPath = "/lakehouse/default/Files"
     subFolderPath = os.path.join(folderPath, "pqtnewfolder")
 
     with connect_semantic_model(
-        dataset=dataset, workspace=workspace, readonly=True
+        dataset=dataset_id, workspace=workspace_id, readonly=True
     ) as tom:
         if not any(
             p.SourceType == TOM.PartitionSourceType.M for p in tom.all_partitions()
         ) and not any(t.RefreshPolicy for t in tom.model.Tables):
             print(
-                f"{icons.info} The '{dataset}' semantic model within the '{workspace}' workspace has no Power Query logic."
+                f"{icons.info} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has no Power Query logic."
            )
            return
@@ -220,7 +225,7 @@ def create_pqt_file(
     shutil.rmtree(subFolderPath, ignore_errors=True)
 
     print(
-        f"{icons.green_dot} '{file_name}.pqt' has been created based on the '{dataset}' semantic model in the '{workspace}' workspace within the Files section of your lakehouse."
+        f"{icons.green_dot} '{file_name}.pqt' has been created based on the '{dataset_name}' semantic model in the '{workspace_name}' workspace within the Files section of your lakehouse."
    )
 
    a = 0
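
create_pqt_file still requires an attached lakehouse, but it now resolves the dataset and workspace to (name, id) pairs up front, so either form may be passed and status messages always show display names. A usage sketch with placeholder values, assuming the usual sempy_labs.migration export:

# Usage sketch; the dataset UUID and workspace name are placeholders.
from uuid import UUID
from sempy_labs.migration import create_pqt_file

create_pqt_file(
    dataset=UUID("11111111-1111-1111-1111-111111111111"),  # dataset ID now accepted
    workspace="My Workspace",                              # name or ID
    file_name="PowerQueryTemplate",  # written to Files/ in the attached lakehouse
)
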
sempy_labs/migration/_refresh_calc_tables.py

@@ -7,25 +7,31 @@ from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
+)
 
 
 @log
-def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
+def refresh_calc_tables(dataset: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Recreates the delta tables in the lakehouse based on the DAX expressions stored as model annotations in the Direct Lake semantic model.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | UUID
+        Name or ID of the semantic model.
+    workspace : str | UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
     spark = SparkSession.builder.getOrCreate()
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
     icons.sll_tags.append("DirectLakeMigration")
 
     @retry(
@@ -34,7 +40,7 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
     )
     def dyn_connect():
         with connect_semantic_model(
-            dataset=dataset, readonly=True, workspace=workspace
+            dataset=dataset_id, readonly=True, workspace=workspace_id
         ) as tom:
 
            tom.model
@@ -42,7 +48,7 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
     dyn_connect()
 
     with connect_semantic_model(
-        dataset=dataset, readonly=True, workspace=workspace
+        dataset=dataset_id, readonly=True, workspace=workspace_id
     ) as tom:
         for a in tom.model.Annotations:
             if any(a.Name == t.Name for t in tom.model.Tables):
@@ -56,9 +62,9 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
 
                try:
                    df = fabric.evaluate_dax(
-                        dataset=dataset,
+                        dataset=dataset_id,
                        dax_string=daxquery,
-                        workspace=workspace,
+                        workspace=workspace_id,
                    )
 
                    # Update column names for non-field parameters
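
refresh_calc_tables follows the same pattern: resolve once, then hand IDs to connect_semantic_model and fabric.evaluate_dax so the per-annotation loop needs no further name lookups. A usage sketch with placeholder values:

# Usage sketch; both arguments accept a name or a UUID in 0.9.0.
# The model and workspace names are placeholders.
from sempy_labs.migration import refresh_calc_tables

refresh_calc_tables(
    dataset="MigratedDirectLakeModel",  # or the semantic model's UUID
    workspace="My Workspace",           # or the workspace ID; None uses the attached context
)
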
sempy_labs/report/_download_report.py

@@ -7,13 +7,14 @@ from sempy_labs._helper_functions import (
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
 def download_report(
     report: str,
     file_name: Optional[str] = None,
     download_type: str = "LiveConnect",
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Downloads the specified report from the specified workspace to a Power BI .pbix file.
@@ -29,8 +30,8 @@ def download_report(
         Defaults to None which resolves to the name of the report.
     download_type : str, default="LiveConnect"
         The type of download. Valid values are "LiveConnect" and "IncludeModel".
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
@@ -40,10 +41,11 @@ def download_report(
            f"{icons.red_dot} A lakehouse must be attached to the notebook."
        )
 
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     lakehouse_id = fabric.get_lakehouse_id()
-    workspace_name = fabric.resolve_workspace_name()
+    lakehouse_workspace = fabric.resolve_workspace_name()
     lakehouse_name = resolve_lakehouse_name(
-        lakehouse_id=lakehouse_id, workspace=workspace_name
+        lakehouse_id=lakehouse_id, workspace=lakehouse_workspace
     )
 
     download_types = ["LiveConnect", "IncludeModel"]
@@ -53,9 +55,8 @@ def download_report(
        )
 
     file_name = file_name or report
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
     report_id = fabric.resolve_item_id(
-        item_name=report, type="Report", workspace=workspace
+        item_name=report, type="Report", workspace=workspace_id
    )
 
    client = fabric.PowerBIRestClient()
@@ -71,5 +72,5 @@ def download_report(
            file.write(response.content)
 
     print(
-        f"{icons.green_dot} The '{report}' report within the '{workspace}' workspace has been exported as the '{file_name}' file in the '{lakehouse_name}' lakehouse within the '{workspace_name} workspace."
+        f"{icons.green_dot} The '{report}' report within the '{workspace_name}' workspace has been exported as the '{file_name}' file in the '{lakehouse_name}' lakehouse within the '{lakehouse_workspace}' workspace."
    )
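
Besides accepting a workspace ID, this change untangles a naming mix-up: 0.8.10 bound workspace_name to the attached lakehouse's workspace and reused it in the final message even when the report lived elsewhere, whereas 0.9.0 keeps the report's workspace (workspace_name) and the lakehouse's workspace (lakehouse_workspace) separate. A usage sketch with placeholder values:

# Usage sketch; report and workspace names are placeholders. The .pbix lands in
# the attached lakehouse's Files area regardless of the report's workspace.
from sempy_labs.report import download_report

download_report(
    report="Sales Overview",
    file_name="SalesOverview_backup",  # defaults to the report name
    download_type="LiveConnect",       # or "IncludeModel"
    workspace="Reporting",             # name or ID of the report's workspace
)
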
sempy_labs/report/_generate_report.py

@@ -7,18 +7,22 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _conv_b64,
     resolve_report_id,
+    resolve_dataset_name_and_id,
+    resolve_item_name_and_id,
     lro,
 )
 import sempy_labs._icons as icons
 from sempy._utils._log import log
+from uuid import UUID
+from sempy.fabric.exceptions import FabricHTTPException
 
 
 def create_report_from_reportjson(
     report: str,
-    dataset: str,
+    dataset: str | UUID,
     report_json: dict,
     theme_json: Optional[dict] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Creates a report based on a report.json file (and an optional themes.json file).
@@ -29,36 +33,27 @@ def create_report_from_reportjson(
     ----------
     report : str
         Name of the report.
-    dataset : str
-        Name of the semantic model to connect to the report.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model to connect to the report.
     report_json : dict
         The report.json file to be used to create the report.
     theme_json : dict, default=None
         The theme.json file to be used for the theme of the report.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    dfI = fabric.list_items(workspace=workspace)
-
-    dfI_model = dfI[(dfI["Display Name"] == dataset) & (dfI["Type"] == "SemanticModel")]
-
-    if len(dfI_model) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
-        )
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
-    datasetId = dfI_model["Id"].iloc[0]
+    dfI = fabric.list_items(workspace=workspace, type="Report")
+    dfI_rpt = dfI[dfI["Display Name"] == report]
 
-    dfI_rpt = dfI[(dfI["Display Name"] == report) & (dfI["Type"] == "Report")]
-
-    if len(dfI_rpt) > 0:
+    if not dfI_rpt.empty:
         print(
-            f"{icons.yellow_dot} '{report}' already exists as a report in the '{workspace}' workspace."
+            f"{icons.yellow_dot} '{report}' report already exists in the '{workspace_name}' workspace."
        )
        return
@@ -71,7 +66,7 @@ def create_report_from_reportjson(
                "connectionString": None,
                "pbiServiceModelId": None,
                "pbiModelVirtualServerName": "sobe_wowvirtualserver",
-                "pbiModelDatabaseName": datasetId,
+                "pbiModelDatabaseName": dataset_id,
                "name": "EntityDataSource",
                "connectionType": "pbiServiceXmlaStyleLive",
            },
@@ -116,12 +111,12 @@ def create_report_from_reportjson(
     lro(client, response, status_codes=[201, 202], return_status_code=True)
 
     print(
-        f"{icons.green_dot} Succesfully created the '{report}' report within the '{workspace}' workspace."
+        f"{icons.green_dot} Succesfully created the '{report}' report within the '{workspace_name}' workspace."
    )
 
 
 def update_report_from_reportjson(
-    report: str, report_json: dict, workspace: Optional[str] = None
+    report: str, report_json: dict, workspace: Optional[str | UUID] = None
 ):
     """
     Updates a report based on a report.json file.
@@ -134,17 +129,17 @@ def update_report_from_reportjson(
         Name of the report.
     report_json : dict
         The report.json file to be used to update the report.
-    workspace : str, default=None
-        The Fabric workspace name in which the report resides.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the report resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-    report_id = resolve_report_id(report=report, workspace=workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    report_id = resolve_report_id(report=report, workspace=workspace_id)
 
     # Get the existing PBIR file
-    df_items = get_report_definition(report=report, workspace=workspace)
+    df_items = get_report_definition(report=report, workspace=workspace_id)
     df_items_filt = df_items[df_items["path"] == "definition.pbir"]
     rptDefFile = df_items_filt["payload"].iloc[0]
     payloadReportJson = _conv_b64(report_json)
@@ -175,12 +170,12 @@ def update_report_from_reportjson(
     lro(client, response, return_status_code=True)
 
     print(
-        f"{icons.green_dot} The '{report}' report within the '{workspace}' workspace has been successfully updated."
+        f"{icons.green_dot} The '{report}' report within the '{workspace_name}' workspace has been successfully updated."
    )
 
 
 def get_report_definition(
-    report: str, workspace: Optional[str] = None, return_dataframe: bool = True
+    report: str, workspace: Optional[str | UUID] = None, return_dataframe: bool = True
 ) -> pd.DataFrame | dict:
     """
     Gets the collection of definition files of a report.
@@ -191,8 +186,8 @@ def get_report_definition(
     ----------
     report : str
         Name of the report.
-    workspace : str, default=None
-        The Fabric workspace name in which the report resides.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the report resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     return_dataframe : bool, default=True
@@ -204,9 +199,9 @@ def get_report_definition(
         The collection of report definition files within a pandas dataframe.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    report_id = resolve_report_id(report=report, workspace=workspace)
+    report_id = resolve_report_id(report=report, workspace=workspace_id)
     client = fabric.FabricRestClient()
     response = client.post(
         f"/v1/workspaces/{workspace_id}/reports/{report_id}/getDefinition",
@@ -224,7 +219,7 @@ def get_report_definition(
 def create_model_bpa_report(
     report: Optional[str] = icons.model_bpa_name,
     dataset: Optional[str] = icons.model_bpa_name,
-    dataset_workspace: Optional[str] = None,
+    dataset_workspace: Optional[str | UUID] = None,
 ):
     """
     Dynamically generates a Best Practice Analyzer report for analyzing semantic models.
@@ -237,24 +232,27 @@ def create_model_bpa_report(
     dataset : str, default='ModelBPA'
         Name of the semantic model which feeds this report.
         Defaults to 'ModelBPA'
-    dataset_workspace : str, default=None
-        The Fabric workspace name in which the semantic model resides.
+    dataset_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
     """
-
     # from sempy_labs._helper_functions import resolve_dataset_id
 
-    dfI = fabric.list_items(workspace=dataset_workspace, type="SemanticModel")
+    (dataset_workspace_name, dataset_workspace_id) = resolve_workspace_name_and_id(
+        dataset_workspace
+    )
+
+    dfI = fabric.list_items(workspace=dataset_workspace_id, type="SemanticModel")
     dfI_filt = dfI[dfI["Display Name"] == dataset]
 
     if len(dfI_filt) == 0:
         raise ValueError(
-            f"The '{dataset}' semantic model does not exist within the '{dataset_workspace}' workspace."
+            f"The '{dataset}' semantic model does not exist within the '{dataset_workspace_name}' workspace."
        )
 
-    dfR = fabric.list_reports(workspace=dataset_workspace)
+    dfR = fabric.list_reports(workspace=dataset_workspace_id)
     dfR_filt = dfR[dfR["Name"] == report]
     # dataset_id = resolve_dataset_id(dataset=dataset, workspace=dataset_workspace)
@@ -306,14 +304,14 @@ def create_model_bpa_report(
 
     if len(dfR_filt) > 0:
         update_report_from_reportjson(
-            report=report, report_json=report_json, workspace=dataset_workspace
+            report=report, report_json=report_json, workspace=dataset_workspace_id
         )
     else:
         create_report_from_reportjson(
             report=report,
             dataset=dataset,
             report_json=report_json,
-            workspace=dataset_workspace,
+            workspace=dataset_workspace_id,
        )
@@ -375,3 +373,46 @@ def _create_report(
         report_workspace=report_workspace,
         dataset_workspace=dataset_workspace,
     )
+
+
+def _get_report(
+    report: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (report_name, report_id) = resolve_item_name_and_id(
+        item=report, type="Report", workspace=workspace
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.get(f"/v1.0/myorg/groups/{workspace_id}/reports/{report_id}")
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    result = response.json()
+
+    new_data = {
+        "Id": result.get("id"),
+        "Report Type": result.get("reportType"),
+        "Name": result.get("name"),
+        "Web Url": result.get("webUrl"),
+        "Embed Url": result.get("embedUrl"),
+        "Is From Pbix": result.get("isFromPbix"),
+        "Is Owned By Me": result.get("isOwnedByMe"),
+        "Dataset Id": result.get("datasetId"),
+        "Dataset Workspace Id": result.get("datasetWorkspaceId"),
+        "Users": result.get("users") if result.get("users") is not None else [],
+        "Subscriptions": (
+            result.get("subscriptions")
+            if result.get("subscriptions") is not None
+            else []
+        ),
+    }
+
+    df = pd.DataFrame([new_data])
+
+    bool_cols = ["Is From Pbix", "Is Owned By Me"]
+    df[bool_cols] = df[bool_cols].astype(bool)
+
+    return df
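
The public entry points in _generate_report.py compose naturally: get_report_definition pulls the PBIR definition files, and update_report_from_reportjson swaps a new report.json into that definition. A sketch of the round trip, with placeholder names and a deliberately stubbed payload; the import path assumes the usual sempy_labs.report re-exports:

# Round-trip sketch using the functions changed above; names are placeholders
# and stub_report_json is a stand-in, not a valid report definition.
from sempy_labs.report import get_report_definition, update_report_from_reportjson

parts = get_report_definition(report="Sales Overview", workspace="Reporting")
print(parts["path"].tolist())  # the definition file paths, e.g. 'definition.pbir'

stub_report_json = {"config": "{}", "sections": []}  # placeholder payload
update_report_from_reportjson(
    report="Sales Overview",
    report_json=stub_report_json,  # replaces report.json in the definition
    workspace="Reporting",         # name or UUID as of 0.9.0
)
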
sempy_labs/report/_paginated.py

@@ -2,11 +2,15 @@ import sempy.fabric as fabric
 from typing import Optional
 from sempy.fabric.exceptions import FabricHTTPException
 import pandas as pd
+from uuid import UUID
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+)
 
 
 def get_report_datasources(
     report: str,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ) -> pd.DataFrame:
     """
     Returns a list of data sources for the specified paginated report (RDL) from the specified workspace.
@@ -15,8 +19,8 @@ def get_report_datasources(
     ----------
     report : str | List[str]
         Name(s) of the Power BI report(s).
-    workspace : str, default=None
-        The name of the Fabric workspace in which the report resides.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace in which the report resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -38,14 +42,10 @@ def get_report_datasources(
        ]
    )
 
-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     report_id = fabric.resolve_item_id(
-        item_name=report, type="PaginatedReport", workspace=workspace
+        item_name=report, type="PaginatedReport", workspace=workspace_id
    )
 
    client = fabric.PowerBIRestClient()
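
get_report_datasources drops its inline None handling in favor of the shared resolver, so the workspace argument behaves like everywhere else in 0.9.0. A usage sketch with placeholder values, assuming the usual sempy_labs.report export:

# Usage sketch; the paginated report and workspace names are placeholders.
from sempy_labs.report import get_report_datasources

df = get_report_datasources(
    report="Monthly Invoices",  # a paginated (RDL) report
    workspace="Reporting",      # name or UUID; None resolves to the attached context
)
print(df.head())
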