semantic-link-labs 0.8.9__py3-none-any.whl → 0.8.11__py3-none-any.whl

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (76)
  1. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +5 -2
  2. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +76 -75
  3. sempy_labs/__init__.py +14 -2
  4. sempy_labs/_authentication.py +31 -2
  5. sempy_labs/_clear_cache.py +39 -37
  6. sempy_labs/_connections.py +13 -13
  7. sempy_labs/_data_pipelines.py +20 -20
  8. sempy_labs/_dataflows.py +27 -28
  9. sempy_labs/_dax.py +41 -47
  10. sempy_labs/_environments.py +26 -23
  11. sempy_labs/_eventhouses.py +16 -15
  12. sempy_labs/_eventstreams.py +16 -15
  13. sempy_labs/_external_data_shares.py +18 -20
  14. sempy_labs/_gateways.py +57 -11
  15. sempy_labs/_generate_semantic_model.py +100 -71
  16. sempy_labs/_git.py +134 -67
  17. sempy_labs/_helper_functions.py +199 -145
  18. sempy_labs/_job_scheduler.py +92 -0
  19. sempy_labs/_kql_databases.py +16 -15
  20. sempy_labs/_kql_querysets.py +16 -15
  21. sempy_labs/_list_functions.py +281 -120
  22. sempy_labs/_managed_private_endpoints.py +19 -17
  23. sempy_labs/_mirrored_databases.py +51 -48
  24. sempy_labs/_mirrored_warehouses.py +5 -4
  25. sempy_labs/_ml_experiments.py +16 -15
  26. sempy_labs/_ml_models.py +15 -14
  27. sempy_labs/_model_bpa.py +27 -25
  28. sempy_labs/_model_bpa_bulk.py +3 -3
  29. sempy_labs/_model_dependencies.py +60 -28
  30. sempy_labs/_notebooks.py +73 -39
  31. sempy_labs/_one_lake_integration.py +23 -26
  32. sempy_labs/_query_scale_out.py +67 -64
  33. sempy_labs/_refresh_semantic_model.py +47 -42
  34. sempy_labs/_spark.py +33 -32
  35. sempy_labs/_sql.py +12 -9
  36. sempy_labs/_translations.py +10 -7
  37. sempy_labs/_vertipaq.py +34 -31
  38. sempy_labs/_warehouses.py +22 -21
  39. sempy_labs/_workspace_identity.py +11 -10
  40. sempy_labs/_workspaces.py +40 -33
  41. sempy_labs/admin/__init__.py +4 -0
  42. sempy_labs/admin/_basic_functions.py +44 -12
  43. sempy_labs/admin/_external_data_share.py +3 -3
  44. sempy_labs/admin/_items.py +4 -4
  45. sempy_labs/admin/_scanner.py +7 -5
  46. sempy_labs/directlake/_directlake_schema_compare.py +18 -14
  47. sempy_labs/directlake/_directlake_schema_sync.py +18 -12
  48. sempy_labs/directlake/_dl_helper.py +36 -32
  49. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  50. sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
  51. sempy_labs/directlake/_get_shared_expression.py +4 -3
  52. sempy_labs/directlake/_guardrails.py +12 -6
  53. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  54. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  55. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  56. sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
  57. sempy_labs/directlake/_warm_cache.py +87 -65
  58. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
  59. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
  60. sempy_labs/lakehouse/_lakehouse.py +17 -13
  61. sempy_labs/lakehouse/_shortcuts.py +42 -23
  62. sempy_labs/migration/_create_pqt_file.py +16 -11
  63. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  64. sempy_labs/report/_download_report.py +9 -8
  65. sempy_labs/report/_generate_report.py +40 -44
  66. sempy_labs/report/_paginated.py +9 -9
  67. sempy_labs/report/_report_bpa.py +13 -9
  68. sempy_labs/report/_report_functions.py +80 -91
  69. sempy_labs/report/_report_helper.py +8 -4
  70. sempy_labs/report/_report_list_functions.py +24 -13
  71. sempy_labs/report/_report_rebind.py +17 -16
  72. sempy_labs/report/_reportwrapper.py +41 -33
  73. sempy_labs/tom/_model.py +117 -38
  74. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
  75. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
  76. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
sempy_labs/_vertipaq.py CHANGED
@@ -10,10 +10,11 @@ from pyspark.sql import SparkSession
 from sempy_labs._helper_functions import (
     format_dax_object_name,
     resolve_lakehouse_name,
-    resolve_dataset_id,
     save_as_delta_table,
     resolve_workspace_capacity,
     _get_max_run_id,
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
 )
 from sempy_labs._list_functions import list_relationships, list_tables
 from sempy_labs.lakehouse import lakehouse_attached, get_lakehouse_tables
@@ -22,12 +23,13 @@ from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 from pathlib import Path
+from uuid import UUID


 @log
 def vertipaq_analyzer(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     export: Optional[str] = None,
     read_stats_from_data: bool = False,
     **kwargs,
@@ -37,10 +39,10 @@ def vertipaq_analyzer(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model exists.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     export : str, default=None
@@ -49,10 +51,6 @@ def vertipaq_analyzer(
         Default value: None.
     read_stats_from_data : bool, default=False
         Setting this parameter to true has the function get Column Cardinality and Missing Rows using DAX (Direct Lake semantic models achieve this using a Spark query to the lakehouse).
-
-    Returns
-    -------
-
     """

     from sempy_labs.tom import connect_semantic_model
@@ -68,7 +66,8 @@ def vertipaq_analyzer(
         "ignore", message="createDataFrame attempted Arrow optimization*"
     )

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     vertipaq_map = {
         "Model": {
@@ -135,7 +134,7 @@ def vertipaq_analyzer(
     }

     with connect_semantic_model(
-        dataset=dataset, workspace=workspace, readonly=True
+        dataset=dataset_id, workspace=workspace_id, readonly=True
     ) as tom:
         compat_level = tom.model.Model.Database.CompatibilityLevel
         is_direct_lake = tom.is_direct_lake()
@@ -144,24 +143,28 @@ def vertipaq_analyzer(
         column_count = len(list(tom.all_columns()))
         if table_count == 0:
             print(
-                f"{icons.warning} The '{dataset}' semantic model within the '{workspace}' workspace has no tables. Vertipaq Analyzer can only be run if the semantic model has tables."
+                f"{icons.warning} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has no tables. Vertipaq Analyzer can only be run if the semantic model has tables."
             )
             return

-        dfT = list_tables(dataset=dataset, extended=True, workspace=workspace)
+        dfT = list_tables(dataset=dataset_id, extended=True, workspace=workspace_id)

         dfT.rename(columns={"Name": "Table Name"}, inplace=True)
         columns_to_keep = list(vertipaq_map["Tables"].keys())
         dfT = dfT[dfT.columns.intersection(columns_to_keep)]

-        dfC = fabric.list_columns(dataset=dataset, extended=True, workspace=workspace)
+        dfC = fabric.list_columns(dataset=dataset_id, extended=True, workspace=workspace_id)
         dfC["Column Object"] = format_dax_object_name(dfC["Table Name"], dfC["Column Name"])
         dfC.rename(columns={"Column Cardinality": "Cardinality"}, inplace=True)
-        dfH = fabric.list_hierarchies(dataset=dataset, extended=True, workspace=workspace)
-        dfR = list_relationships(dataset=dataset, extended=True, workspace=workspace)
-        dfP = fabric.list_partitions(dataset=dataset, extended=True, workspace=workspace)
+        dfH = fabric.list_hierarchies(
+            dataset=dataset_id, extended=True, workspace=workspace_id
+        )
+        dfR = list_relationships(dataset=dataset_id, extended=True, workspace=workspace_id)
+        dfP = fabric.list_partitions(
+            dataset=dataset_id, extended=True, workspace=workspace_id
+        )
         artifact_type, lakehouse_name, lakehouse_id, lakehouse_workspace_id = (
-            get_direct_lake_source(dataset=dataset, workspace=workspace)
+            get_direct_lake_source(dataset=dataset_id, workspace=workspace_id)
         )

         dfR["Missing Rows"] = 0
@@ -308,7 +311,7 @@ def vertipaq_analyzer(
             query = f"evaluate\nsummarizecolumns(\n\"1\",calculate(countrows('{fromTable}'),userelationship({fromObject},{toObject}),isblank({toObject}))\n)"

             result = fabric.evaluate_dax(
-                dataset=dataset, dax_string=query, workspace=workspace
+                dataset=dataset_id, dax_string=query, workspace=workspace_id
             )

             try:
@@ -407,7 +410,7 @@ def vertipaq_analyzer(

     dfModel = pd.DataFrame(
         {
-            "Dataset Name": dataset,
+            "Dataset Name": dataset_name,
             "Total Size": y,
             "Table Count": table_count,
             "Column Count": column_count,
@@ -532,19 +535,19 @@ def vertipaq_analyzer(
            f"{icons.in_progress} Saving Vertipaq Analyzer to delta tables in the lakehouse...\n"
        )
        now = datetime.datetime.now()
-        dfD = fabric.list_datasets(workspace=workspace, mode="rest")
-        dfD_filt = dfD[dfD["Dataset Name"] == dataset]
+        dfD = fabric.list_datasets(workspace=workspace_id, mode="rest")
+        dfD_filt = dfD[dfD["Dataset Id"] == dataset_id]
        configured_by = dfD_filt["Configured By"].iloc[0]
-        capacity_id, capacity_name = resolve_workspace_capacity(workspace=workspace)
+        capacity_id, capacity_name = resolve_workspace_capacity(workspace=workspace_id)

        for key_name, (obj, df) in dfMap.items():
            df["Capacity Name"] = capacity_name
            df["Capacity Id"] = capacity_id
            df["Configured By"] = configured_by
-            df["Workspace Name"] = workspace
-            df["Workspace Id"] = fabric.resolve_workspace_id(workspace)
-            df["Dataset Name"] = dataset
-            df["Dataset Id"] = resolve_dataset_id(dataset, workspace)
+            df["Workspace Name"] = workspace_name
+            df["Workspace Id"] = workspace_id
+            df["Dataset Name"] = dataset_name
+            df["Dataset Id"] = dataset_id
            df["RunId"] = runId
            df["Timestamp"] = now

@@ -605,7 +608,7 @@ def vertipaq_analyzer(
            "dfH_filt": dfH_filt,
        }

-        zipFileName = f"{workspace}.{dataset}.zip"
+        zipFileName = f"{workspace_name}.{dataset_name}.zip"

        folderPath = "/lakehouse/default/Files"
        subFolderPath = os.path.join(folderPath, "VertipaqAnalyzer")
@@ -631,7 +634,7 @@ def vertipaq_analyzer(
        if os.path.exists(filePath):
            os.remove(filePath)
        print(
-            f"{icons.green_dot} The Vertipaq Analyzer info for the '{dataset}' semantic model in the '{workspace}' workspace has been saved "
+            f"{icons.green_dot} The Vertipaq Analyzer info for the '{dataset_name}' semantic model in the '{workspace_name}' workspace has been saved "
            f"to the 'Vertipaq Analyzer/{zipFileName}' in the default lakehouse attached to this notebook."
        )

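The net effect of this file's changes: dataset and workspace may now be passed as a name or a UUID, with a (name, id) pair resolved once up front. A minimal usage sketch, assuming a Fabric notebook with the package installed and its usual top-level re-exports; the UUID and workspace name below are placeholders, not real identifiers:

    from uuid import UUID
    import sempy_labs as labs

    # As of 0.8.11, dataset/workspace accept either a name or a UUID; the
    # function resolves (name, id) pairs internally via
    # resolve_workspace_name_and_id / resolve_dataset_name_and_id, so
    # downstream calls can pin to the stable ID while messages use the name.
    labs.vertipaq_analyzer(
        dataset=UUID("11111111-2222-3333-4444-555555555555"),  # placeholder ID
        workspace="Sales Analytics",  # a plain name still works
        read_stats_from_data=False,
    )
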
sempy_labs/_warehouses.py CHANGED
@@ -8,13 +8,14 @@ import pandas as pd
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID


 def create_warehouse(
     warehouse: str,
     description: Optional[str] = None,
     case_insensitive_collation: bool = False,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Creates a Fabric warehouse.
@@ -29,13 +30,13 @@ def create_warehouse(
         A description of the warehouse.
     case_insensitive_collation: bool, default=False
         If True, creates the warehouse with case-insensitive collation.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     request_body = {"displayName": warehouse}
@@ -55,11 +56,11 @@ def create_warehouse(
     lro(client, response, status_codes=[201, 202])

     print(
-        f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace_name}' workspace."
     )


-def list_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the warehouses within a workspace.

@@ -67,8 +68,8 @@ def list_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:

     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -89,7 +90,7 @@ def list_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
         ]
     )

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/warehouses")
@@ -115,7 +116,7 @@ def list_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
     return df


-def delete_warehouse(name: str, workspace: Optional[str] = None):
+def delete_warehouse(name: str, workspace: Optional[str | UUID] = None):
     """
     Deletes a Fabric warehouse.
@@ -125,16 +126,16 @@ def delete_warehouse(name: str, workspace: Optional[str] = None):
     ----------
     name: str
         Name of the warehouse.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     item_id = fabric.resolve_item_id(
-        item_name=name, type="Warehouse", workspace=workspace
+        item_name=name, type="Warehouse", workspace=workspace_id
     )

     client = fabric.FabricRestClient()
@@ -144,12 +145,12 @@ def delete_warehouse(name: str, workspace: Optional[str] = None):
         raise FabricHTTPException(response)

     print(
-        f"{icons.green_dot} The '{name}' warehouse within the '{workspace}' workspace has been deleted."
+        f"{icons.green_dot} The '{name}' warehouse within the '{workspace_name}' workspace has been deleted."
     )


 def get_warehouse_tables(
-    warehouse: str, workspace: Optional[str] = None
+    warehouse: str, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows a list of the tables in the Fabric warehouse. This function is based on INFORMATION_SCHEMA.TABLES.
@@ -158,8 +159,8 @@ def get_warehouse_tables(
     ----------
     warehouse : str
         Name of the Fabric warehouse.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -184,7 +185,7 @@ def get_warehouse_tables(


 def get_warehouse_columns(
-    warehouse: str, workspace: Optional[str] = None
+    warehouse: str, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows a list of the columns in each table within the Fabric warehouse. This function is based on INFORMATION_SCHEMA.COLUMNS.
@@ -193,8 +194,8 @@ def get_warehouse_columns(
     ----------
     warehouse : str
         Name of the Fabric warehouse.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

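The same pattern runs through the warehouse helpers. A short sketch, assuming the functions remain re-exported at the top level as in prior releases; the warehouse name and workspace UUID are placeholders:

    from uuid import UUID
    import sempy_labs as labs

    ws = UUID("11111111-2222-3333-4444-555555555555")  # placeholder workspace ID

    # create/list/delete all accept the workspace as a name or UUID as of
    # 0.8.11; confirmation messages print the resolved workspace *name*
    # regardless of which form was passed in.
    labs.create_warehouse(warehouse="SalesDW", description="Demo warehouse", workspace=ws)
    df = labs.list_warehouses(workspace=ws)
    labs.delete_warehouse(name="SalesDW", workspace=ws)
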
sempy_labs/_workspace_identity.py CHANGED
@@ -6,9 +6,10 @@ from sempy_labs._helper_functions import (
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID


-def provision_workspace_identity(workspace: Optional[str] = None):
+def provision_workspace_identity(workspace: Optional[str | UUID] = None):
     """
     Provisions a workspace identity for a workspace.

@@ -16,13 +17,13 @@ def provision_workspace_identity(workspace: Optional[str] = None):

     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    workspace, workspace_id = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
     response = client.post(f"/v1/workspaces/{workspace_id}/provisionIdentity")
@@ -33,11 +34,11 @@ def provision_workspace_identity(workspace: Optional[str] = None):
     lro(client, response)

     print(
-        f"{icons.green_dot} A workspace identity has been provisioned for the '{workspace}' workspace."
+        f"{icons.green_dot} A workspace identity has been provisioned for the '{workspace_name}' workspace."
     )


-def deprovision_workspace_identity(workspace: Optional[str] = None):
+def deprovision_workspace_identity(workspace: Optional[str | UUID] = None):
     """
     Deprovisions a workspace identity for a workspace.

@@ -45,13 +46,13 @@ def deprovision_workspace_identity(workspace: Optional[str] = None):

     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    workspace, workspace_id = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
     response = client.post(f"/v1/workspaces/{workspace_id}/deprovisionIdentity")
@@ -62,5 +63,5 @@ def deprovision_workspace_identity(workspace: Optional[str] = None):
     lro(client, response)

     print(
-        f"{icons.green_dot} The workspace identity has been deprovisioned from the '{workspace}' workspace."
+        f"{icons.green_dot} The workspace identity has been deprovisioned from the '{workspace_name}' workspace."
     )
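A sketch of the updated identity helpers, assuming top-level re-exports; both calls hit the same REST endpoints as before, only the accepted workspace type widens (placeholders throughout, and note these operations change workspace state):

    from uuid import UUID
    import sempy_labs as labs

    # Name and UUID forms resolve to the same workspace; the printed
    # confirmation always uses the resolved workspace name.
    labs.provision_workspace_identity(workspace="Sales Analytics")
    labs.deprovision_workspace_identity(
        workspace=UUID("11111111-2222-3333-4444-555555555555")  # placeholder ID
    )
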
sempy_labs/_workspaces.py CHANGED
@@ -8,9 +8,12 @@ from sempy_labs._helper_functions import (
     resolve_capacity_id,
 )
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID


-def delete_user_from_workspace(email_address: str, workspace: Optional[str] = None):
+def delete_user_from_workspace(
+    email_address: str, workspace: Optional[str | UUID] = None
+):
     """
     Removes a user from a workspace.

@@ -20,13 +23,13 @@ def delete_user_from_workspace(email_address: str, workspace: Optional[str] = None):
     ----------
     email_address : str
         The email address of the user.
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.PowerBIRestClient()
     response = client.delete(f"/v1.0/myorg/groups/{workspace_id}/users/{email_address}")
@@ -34,7 +37,7 @@ def delete_user_from_workspace(email_address: str, workspace: Optional[str] = None):
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{email_address}' user has been removed from accessing the '{workspace}' workspace."
+        f"{icons.green_dot} The '{email_address}' user has been removed from accessing the '{workspace_name}' workspace."
     )


@@ -42,7 +45,7 @@ def update_workspace_user(
     email_address: str,
     role_name: str,
     principal_type: Optional[str] = "User",
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Updates a user's role within a workspace.
@@ -57,13 +60,13 @@ def update_workspace_user(
         The `role <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#groupuseraccessright>`_ of the user within the workspace.
     principal_type : str, default='User'
         The `principal type <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#principaltype>`_.
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     role_names = icons.workspace_roles
     role_name = role_name.capitalize()
@@ -91,11 +94,11 @@ def update_workspace_user(
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{email_address}' user has been updated to a '{role_name}' within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{email_address}' user has been updated to a '{role_name}' within the '{workspace_name}' workspace."
     )


-def list_workspace_users(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_workspace_users(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     A list of all the users of a workspace and their roles.

@@ -103,8 +106,8 @@ def list_workspace_users(workspace: Optional[str] = None) -> pd.DataFrame:

     Parameters
     ----------
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -114,7 +117,7 @@ def list_workspace_users(workspace: Optional[str] = None) -> pd.DataFrame:
         A pandas dataframe the users of a workspace and their properties.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     df = pd.DataFrame(columns=["User Name", "Email Address", "Role", "Type", "User ID"])
     client = fabric.FabricRestClient()
@@ -143,7 +146,7 @@ def add_user_to_workspace(
     email_address: str,
     role_name: str,
     principal_type: Optional[str] = "User",
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Adds a user to a workspace.
@@ -158,13 +161,13 @@ def add_user_to_workspace(
         The `role <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#groupuseraccessright>`_ of the user within the workspace.
     principal_type : str, default='User'
         The `principal type <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#principaltype>`_.
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     role_names = icons.workspace_roles
     role_name = role_name.capitalize()
@@ -196,11 +199,13 @@ def add_user_to_workspace(
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{email_address}' user has been added as a{plural} '{role_name}' within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{email_address}' user has been added as a{plural} '{role_name}' within the '{workspace_name}' workspace."
     )


-def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] = None):
+def assign_workspace_to_capacity(
+    capacity_name: str, workspace: Optional[str | UUID] = None
+):
     """
     Assigns a workspace to a capacity.

@@ -210,13 +215,13 @@ def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] = None):
     ----------
     capacity_name : str
         The name of the capacity.
-    workspace : str, default=None
-        The name of the Fabric workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     capacity_id = resolve_capacity_id(capacity_name=capacity_name)

     request_body = {"capacityId": capacity_id}
@@ -230,11 +235,11 @@ def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] = None):
     if response.status_code not in [200, 202]:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{workspace}' workspace has been assigned to the '{capacity_name}' capacity."
+        f"{icons.green_dot} The '{workspace_name}' workspace has been assigned to the '{capacity_name}' capacity."
     )


-def unassign_workspace_from_capacity(workspace: Optional[str] = None):
+def unassign_workspace_from_capacity(workspace: Optional[str | UUID] = None):
     """
     Unassigns a workspace from its assigned capacity.

@@ -242,13 +247,13 @@ def unassign_workspace_from_capacity(workspace: Optional[str] = None):

     Parameters
     ----------
-    workspace : str, default=None
-        The name of the Fabric workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
     response = client.post(f"/v1/workspaces/{workspace_id}/unassignFromCapacity")
@@ -256,11 +261,13 @@ def unassign_workspace_from_capacity(workspace: Optional[str] = None):
     if response.status_code not in [200, 202]:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{workspace}' workspace has been unassigned from its capacity."
+        f"{icons.green_dot} The '{workspace_name}' workspace has been unassigned from its capacity."
     )


-def list_workspace_role_assignments(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_workspace_role_assignments(
+    workspace: Optional[str | UUID] = None,
+) -> pd.DataFrame:
     """
     Shows the members of a given workspace.

@@ -268,8 +275,8 @@ def list_workspace_role_assignments(workspace: Optional[str] = None) -> pd.DataFrame:

     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -279,7 +286,7 @@ def list_workspace_role_assignments(workspace: Optional[str] = None) -> pd.DataFrame:
         A pandas dataframe showing the members of a given workspace and their roles.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     df = pd.DataFrame(columns=["User Name", "User Email", "Role Name", "Type"])
sempy_labs/admin/__init__.py CHANGED
@@ -1,3 +1,6 @@
+from sempy_labs.admin._scanner import (
+    scan_workspaces,
+)
 from sempy_labs.admin._basic_functions import (
     assign_workspaces_to_capacity,
     unassign_workspaces_from_capacity,
@@ -66,4 +69,5 @@ __all__ = [
     "list_git_connections",
     "list_reports",
     "get_capacity_assignment_status",
+    "scan_workspaces",
 ]
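
With this re-export, the scanner entry point is importable from the admin subpackage directly. A one-line sketch; the function's parameters are not visible in this diff, so no call arguments are assumed:

    # scan_workspaces previously lived only in sempy_labs.admin._scanner;
    # as of 0.8.11 it is part of the public sempy_labs.admin namespace.
    from sempy_labs.admin import scan_workspaces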