semantic-link-labs 0.9.4__py3-none-any.whl → 0.9.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (71)
  1. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/METADATA +19 -2
  2. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/RECORD +71 -64
  3. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +33 -4
  5. sempy_labs/_capacities.py +59 -128
  6. sempy_labs/_capacity_migration.py +19 -21
  7. sempy_labs/_connections.py +2 -4
  8. sempy_labs/_dashboards.py +60 -0
  9. sempy_labs/_data_pipelines.py +5 -31
  10. sempy_labs/_dataflows.py +2 -2
  11. sempy_labs/_dax_query_view.py +55 -0
  12. sempy_labs/_delta_analyzer.py +16 -14
  13. sempy_labs/_environments.py +28 -49
  14. sempy_labs/_eventhouses.py +27 -53
  15. sempy_labs/_eventstreams.py +16 -34
  16. sempy_labs/_external_data_shares.py +4 -10
  17. sempy_labs/_gateways.py +4 -4
  18. sempy_labs/_generate_semantic_model.py +2 -2
  19. sempy_labs/_git.py +90 -1
  20. sempy_labs/_graphQL.py +8 -21
  21. sempy_labs/_helper_functions.py +440 -91
  22. sempy_labs/_kql_databases.py +24 -35
  23. sempy_labs/_kql_querysets.py +15 -32
  24. sempy_labs/_list_functions.py +17 -192
  25. sempy_labs/_managed_private_endpoints.py +9 -2
  26. sempy_labs/_mirrored_databases.py +17 -49
  27. sempy_labs/_ml_experiments.py +6 -31
  28. sempy_labs/_ml_models.py +4 -28
  29. sempy_labs/_model_bpa.py +4 -11
  30. sempy_labs/_model_bpa_bulk.py +23 -27
  31. sempy_labs/_mounted_data_factories.py +119 -0
  32. sempy_labs/_notebooks.py +16 -26
  33. sempy_labs/_one_lake_integration.py +2 -1
  34. sempy_labs/_semantic_models.py +20 -0
  35. sempy_labs/_sql.py +13 -8
  36. sempy_labs/_sqldatabase.py +61 -100
  37. sempy_labs/_utils.py +42 -0
  38. sempy_labs/_vertipaq.py +25 -13
  39. sempy_labs/_warehouses.py +19 -20
  40. sempy_labs/_workloads.py +23 -9
  41. sempy_labs/_workspace_identity.py +6 -0
  42. sempy_labs/_workspaces.py +55 -7
  43. sempy_labs/admin/__init__.py +21 -1
  44. sempy_labs/admin/_apps.py +1 -1
  45. sempy_labs/admin/_artifacts.py +62 -0
  46. sempy_labs/admin/_basic_functions.py +3 -54
  47. sempy_labs/admin/_capacities.py +61 -0
  48. sempy_labs/admin/_reports.py +74 -0
  49. sempy_labs/admin/_scanner.py +2 -2
  50. sempy_labs/admin/_shared.py +4 -2
  51. sempy_labs/admin/_users.py +133 -0
  52. sempy_labs/admin/_workspaces.py +148 -0
  53. sempy_labs/directlake/_directlake_schema_compare.py +2 -1
  54. sempy_labs/directlake/_directlake_schema_sync.py +65 -19
  55. sempy_labs/directlake/_dl_helper.py +0 -6
  56. sempy_labs/directlake/_generate_shared_expression.py +10 -11
  57. sempy_labs/directlake/_guardrails.py +2 -1
  58. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +12 -25
  59. sempy_labs/directlake/_update_directlake_partition_entity.py +11 -3
  60. sempy_labs/lakehouse/__init__.py +2 -0
  61. sempy_labs/lakehouse/_lakehouse.py +6 -7
  62. sempy_labs/lakehouse/_shortcuts.py +198 -57
  63. sempy_labs/migration/_migration_validation.py +0 -4
  64. sempy_labs/report/_download_report.py +4 -6
  65. sempy_labs/report/_generate_report.py +15 -23
  66. sempy_labs/report/_report_bpa.py +12 -19
  67. sempy_labs/report/_report_functions.py +2 -1
  68. sempy_labs/report/_report_rebind.py +8 -6
  69. sempy_labs/tom/_model.py +34 -16
  70. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/LICENSE +0 -0
  71. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/top_level.txt +0 -0
sempy_labs/_sqldatabase.py CHANGED
@@ -1,70 +1,69 @@
-import sempy.fabric as fabric
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
     _update_dataframe_datatypes,
+    create_item,
+    delete_item,
 )
 import pandas as pd
 from typing import Optional
-import sempy_labs._icons as icons
 from uuid import UUID
 
-## Still debugging the creation of an sql database
-# def create_warehouse(
-#     warehouse: str,
-#     description: Optional[str] = None,
-#     case_insensitive_collation: bool = False,
-#     workspace: Optional[str | UUID] = None,
-# ):
-#     """
-#     Creates a Fabric warehouse.
-
-#     This is a wrapper function for the following API: `Items - Create Warehouse <https://learn.microsoft.com/rest/api/fabric/warehouse/items/create-warehouse>`_.
-
-#     Parameters
-#     ----------
-#     warehouse: str
-#         Name of the warehouse.
-#     description : str, default=None
-#         A description of the warehouse.
-#     case_insensitive_collation: bool, default=False
-#         If True, creates the warehouse with case-insensitive collation.
-#     workspace : str | uuid.UUID, default=None
-#         The Fabric workspace name or ID.
-#         Defaults to None which resolves to the workspace of the attached lakehouse
-#         or if no lakehouse attached, resolves to the workspace of the notebook.
-#     """
-
-#     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-#     payload = {"displayName": warehouse}
-
-#     if description:
-#         payload["description"] = description
-#     if case_insensitive_collation:
-#         payload.setdefault("creationPayload", {})
-#         payload["creationPayload"][
-#             "defaultCollation"
-#         ] = "Latin1_General_100_CI_AS_KS_WS_SC_UTF8"
-
-#     _base_api(
-#         request=f"/v1/workspaces/{workspace_id}/warehouses",
-#         payload=payload,
-#         method="post",
-#         lro_return_status_code=True,
-#         status_codes=[201, 202],
-#     )
-
-#     print(
-#         f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace_name}' workspace."
-#     )
-
-
-def _list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+
+def create_sql_database(
+    name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
+):
+    """
+    Creates a SQL database.
+
+    This is a wrapper function for the following API: `Items - Create SQL Database <https://learn.microsoft.com/rest/api/fabric/sqldatabase/items/create-sql-database>`_.
+
+    Parameters
+    ----------
+    name: str
+        Name of the SQL database.
+    description : str, default=None
+        A description of the SQL database.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    create_item(
+        name=name, description=description, type="SQLDatabase", workspace=workspace
+    )
+
+
+def delete_sql_database(
+    sql_database: str | UUID, workspace: Optional[str | UUID] = None
+):
+    """
+    Deletes a SQL Database.
+
+    This is a wrapper function for the following API: `Items - Delete SQL Database <https://learn.microsoft.com/rest/api/fabric/sqldatabase/items/delete-sql-database>`_.
+
+    Parameters
+    ----------
+    sql_database: str | uuid.UUID
+        Name of the SQL database.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
     """
-    Shows the databses within a workspace.
 
+    delete_item(item=sql_database, type="SQLDatabase", workspace=workspace)
+
+
+def list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Lists all SQL databases in the Fabric workspace.
+
+    This is a wrapper function for the following API: `Items - List SQL Databases <https://learn.microsoft.com/rest/api/fabric/sqldatabase/items/list-sql-databases>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
 
     Parameters
     ----------
@@ -76,45 +75,39 @@ def _list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     Returns
     -------
     pandas.DataFrame
-        A pandas dataframe showing the SQLDabatases within a workspace.
+        A pandas dataframe showing a list of SQL databases in the Fabric workspace.
     """
 
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
     columns = {
         "SQL Database Name": "string",
         "SQL Database Id": "string",
         "Description": "string",
-        "Connection Type": "string",
         "Connection Info": "string",
         "Database Name": "string",
         "Server FQDN": "string",
-        "Provisioning Status": "string",
-        "Created Date": "datetime",
-        "Last Updated Time UTC": "datetime",
     }
     df = _create_dataframe(columns=columns)
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
     responses = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/sqldatabases", uses_pagination=True
+        request=f"/v1/workspaces/{workspace_id}/SQLDatabases",
+        uses_pagination=True,
+        client="fabric_sp",
     )
 
     for r in responses:
         for v in r.get("value", []):
             prop = v.get("properties", {})
-
             new_data = {
                 "SQL Database Name": v.get("displayName"),
                 "SQL Database Id": v.get("id"),
                 "Description": v.get("description"),
-                "Connection Type": v.get("type"),
                 "Connection Info": prop.get("connectionInfo"),
                 "Database Name": prop.get("databaseName"),
                 "Server FQDN": prop.get("serverFqdn"),
-                "Provisioning Status": prop.get("provisioningState"),
-                "Created Date": prop.get("createdDate"),
-                "Last Updated Time UTC": prop.get("lastUpdatedTimeUtc"),
             }
+
            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
     _update_dataframe_datatypes(dataframe=df, column_map=columns)
@@ -122,38 +115,6 @@ def _list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     return df
 
 
-## Still debugging the deletion of an sql database
-# def delete_warehouse(name: str, workspace: Optional[str | UUID] = None):
-#     """
-#     Deletes a Fabric warehouse.
-
-#     This is a wrapper function for the following API: `Items - Delete Warehouse <https://learn.microsoft.com/rest/api/fabric/warehouse/items/delete-warehouse>`_.
-
-#     Parameters
-#     ----------
-#     name: str
-#         Name of the warehouse.
-#     workspace : str | uuid.UUID, default=None
-#         The Fabric workspace name or ID.
-#         Defaults to None which resolves to the workspace of the attached lakehouse
-#         or if no lakehouse attached, resolves to the workspace of the notebook.
-#     """
-
-#     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-#     item_id = fabric.resolve_item_id(
-#         item_name=name, type="Warehouse", workspace=workspace_id
-#     )
-
-#     _base_api(
-#         request=f"/v1/workspaces/{workspace_id}/warehouses/{item_id}", method="delete"
-#     )
-
-#     print(
-#         f"{icons.green_dot} The '{name}' warehouse within the '{workspace_name}' workspace has been deleted."
-#     )
-
-
 def get_sql_database_tables(
     sql_database: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
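
The net effect in _sqldatabase.py: the commented-out warehouse experiments are gone, and create/delete are thin wrappers over the shared create_item/delete_item helpers, with list_sql_databases promoted from the private _list_sql_databases. A minimal usage sketch, assuming these functions are re-exported from the package root (the __init__.py changes listed above suggest this, but the re-exports are not shown in this excerpt); "SalesDB" is a placeholder name:

    import sempy_labs as labs

    # workspace=None resolves to the attached lakehouse's workspace,
    # or the notebook's workspace when no lakehouse is attached.
    labs.create_sql_database(name="SalesDB", description="Demo database")

    df = labs.list_sql_databases()  # pandas DataFrame of SQL databases
    print(df[["SQL Database Name", "Server FQDN"]])

    labs.delete_sql_database(sql_database="SalesDB")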
sempy_labs/_utils.py ADDED
@@ -0,0 +1,42 @@
+item_types = {
+    "Dashboard": ["Dashboard", "dashboards"],
+    "DataPipeline": ["Data Pipeline", "dataPipelines", "pipeline-content.json"],
+    "Datamart": ["Datamart", "datamarts"],
+    "Environment": ["Environment", "environments"],
+    "Eventhouse": ["Eventhouse", "eventhouses", "EventhouseProperties.json"],
+    "Eventstream": ["Eventstream", "eventstreams", "eventstream.json"],
+    "GraphQLApi": ["GraphQL Api", "GraphQLApis"],
+    "KQLDashboard": ["KQL Dashboard", "kqlDashboards", "RealTimeDashboard.json"],
+    "KQLDatabase": [
+        "KQL Database",
+        "kqlDatabases",
+    ],  # "DatabaseProperties.json", "DatabaseSchema.kql"
+    "KQLQueryset": ["KQL Queryset", "kqlQuerysets", "RealTimeQueryset.json"],
+    "Lakehouse": ["Lakehouse", "lakehouses"],
+    "MLExperiment": ["ML Experiment", "mlExperiments"],
+    "MLModel": ["ML Model", "mlModels"],
+    "MirroredDatabase": [
+        "Mirrored Database",
+        "mirroredDatabases",
+        "mirroredDatabase.json",
+    ],
+    "MirroredWarehouse": ["Mirrored Warehouse", "mirroredWarehouses"],
+    "MountedDataFactory": [
+        "Mounted Data Factory",
+        "mountedDataFactories",
+        "mountedDataFactory-content.json",
+    ],
+    "Notebook": ["Notebook", "notebooks"],
+    "PaginatedReport": ["Paginated Report", "paginatedReports"],
+    "Reflex": ["Reflex", "reflexes", "ReflexEntities.json"],
+    "Report": ["Report", "reports", "report.json"],
+    "SQLDatabase": ["SQL Database", "sqlDatabases"],
+    "SQLEndpoint": ["SQL Endpoint", "sqlEndpoints"],
+    "SemanticModel": ["Semantic Model", "semanticModels", "model.bim"],
+    "SparkJobDefinition": [
+        "Spark Job Definition",
+        "sparkJobDefinitions",
+        "SparkJobDefinitionV1.json",
+    ],
+    "Warehouse": ["Warehouse", "warehouses"],
+}
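
The new item_types registry centralizes per-item-type metadata: each value appears to hold the display name, the REST endpoint path segment, and, where applicable, the file that carries the item's definition. A hypothetical lookup, for illustration only (the consuming code below is not part of the package):

    from sempy_labs._utils import item_types

    entry = item_types["Eventhouse"]
    display_name, path_segment = entry[0], entry[1]
    definition_file = entry[2] if len(entry) > 2 else None  # not every type has one

    print(display_name)     # Eventhouse
    print(path_segment)     # eventhouses
    print(definition_file)  # EventhouseProperties.json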
sempy_labs/_vertipaq.py CHANGED
@@ -15,6 +15,8 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     _create_spark_session,
+    resolve_workspace_id,
+    resolve_workspace_name,
 )
 from sempy_labs._list_functions import list_relationships, list_tables
 from sempy_labs.lakehouse import lakehouse_attached, get_lakehouse_tables
@@ -33,9 +35,11 @@ def vertipaq_analyzer(
     export: Optional[str] = None,
     read_stats_from_data: bool = False,
     **kwargs,
-):
+) -> dict[str, pd.DataFrame]:
     """
-    Displays an HTML visualization of the Vertipaq Analyzer statistics from a semantic model.
+    Displays an HTML visualization of the `Vertipaq Analyzer <https://www.sqlbi.com/tools/vertipaq-analyzer/>`_ statistics from a semantic model.
+
+    `Vertipaq Analyzer <https://www.sqlbi.com/tools/vertipaq-analyzer/>`_ is an open-sourced tool built by SQLBI. It provides a detailed analysis of the VertiPaq engine, which is the in-memory engine used by Power BI and Analysis Services Tabular models.
 
     Parameters
     ----------
@@ -51,6 +55,11 @@ def vertipaq_analyzer(
         Default value: None.
     read_stats_from_data : bool, default=False
         Setting this parameter to true has the function get Column Cardinality and Missing Rows using DAX (Direct Lake semantic models achieve this using a Spark query to the lakehouse).
+
+    Returns
+    -------
+    dict[str, pandas.DataFrame]
+        A dictionary of pandas dataframes showing the vertipaq analyzer statistics.
     """
 
     from sempy_labs.tom import connect_semantic_model
@@ -189,8 +198,10 @@ def vertipaq_analyzer(
         & (~dfC["Column Name"].str.startswith("RowNumber-"))
     ]
 
-    object_workspace = fabric.resolve_workspace_name(lakehouse_workspace_id)
-    current_workspace_id = fabric.get_workspace_id()
+    object_workspace = resolve_workspace_name(
+        workspace_id=lakehouse_workspace_id
+    )
+    current_workspace_id = resolve_workspace_id()
     if current_workspace_id != lakehouse_workspace_id:
         lakeTables = get_lakehouse_tables(
             lakehouse=lakehouse_name, workspace=object_workspace
@@ -502,6 +513,14 @@ def vertipaq_analyzer(
 
     if export is None:
         visualize_vertipaq(dfs)
+        return {
+            "Model Summary": export_Model,
+            "Tables": export_Table,
+            "Partitions": export_Part,
+            "Columns": export_Col,
+            "Relationships": export_Rel,
+            "Hierarchies": export_Hier,
+        }
 
     # Export vertipaq to delta tables in lakehouse
     if export in ["table", "zip"]:
@@ -511,22 +530,15 @@ def vertipaq_analyzer(
         )
 
     if export == "table":
-        lakehouse_id = fabric.get_lakehouse_id()
-        lake_workspace = fabric.resolve_workspace_name()
-        lakehouse = resolve_lakehouse_name(
-            lakehouse_id=lakehouse_id, workspace=lake_workspace
-        )
         lakeTName = "vertipaqanalyzer_model"
 
-        lakeT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lake_workspace)
+        lakeT = get_lakehouse_tables()
         lakeT_filt = lakeT[lakeT["Table Name"] == lakeTName]
 
         if len(lakeT_filt) == 0:
             runId = 1
         else:
-            max_run_id = _get_column_aggregate(
-                lakehouse=lakehouse, table_name=lakeTName
-            )
+            max_run_id = _get_column_aggregate(table_name=lakeTName)
             runId = max_run_id + 1
 
     dfMap = {
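
With this change, vertipaq_analyzer returns its statistics as a dictionary of dataframes when export is None, in addition to rendering the HTML view, and the fabric.* workspace lookups move to the local resolver helpers. A minimal sketch; "My Model" is a placeholder dataset name:

    import sempy_labs as labs

    # Renders the HTML view and (as of 0.9.6) also returns the stats.
    dfs = labs.vertipaq_analyzer(dataset="My Model")
    print(list(dfs.keys()))
    # ['Model Summary', 'Tables', 'Partitions', 'Columns', 'Relationships', 'Hierarchies']

    columns_df = dfs["Columns"]  # per-column statistics as a pandas DataFrame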
sempy_labs/_warehouses.py CHANGED
@@ -1,9 +1,9 @@
-import sempy.fabric as fabric
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
     _update_dataframe_datatypes,
+    delete_item,
 )
 import pandas as pd
 from typing import Optional
@@ -16,7 +16,7 @@ def create_warehouse(
     description: Optional[str] = None,
     case_insensitive_collation: bool = False,
     workspace: Optional[str | UUID] = None,
-):
+) -> UUID:
     """
     Creates a Fabric warehouse.
 
@@ -34,6 +34,11 @@ def create_warehouse(
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    uuid.UUID
+        The ID of the created warehouse.
     """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -48,7 +53,7 @@ def create_warehouse(
             "defaultCollation"
         ] = "Latin1_General_100_CI_AS_KS_WS_SC_UTF8"
 
-    _base_api(
+    response = _base_api(
         request=f"/v1/workspaces/{workspace_id}/warehouses",
         payload=payload,
         method="post",
@@ -60,6 +65,8 @@ def create_warehouse(
         f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace_name}' workspace."
     )
 
+    return response.get("id")
+
 
 def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
@@ -67,6 +74,8 @@ def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
 
     This is a wrapper function for the following API: `Items - List Warehouses <https://learn.microsoft.com/rest/api/fabric/warehouse/items/list-warehouses>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -93,7 +102,9 @@ def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     responses = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/warehouses", uses_pagination=True
+        request=f"/v1/workspaces/{workspace_id}/warehouses",
+        uses_pagination=True,
+        client="fabric_sp",
     )
 
     for r in responses:
@@ -115,7 +126,7 @@ def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     return df
 
 
-def delete_warehouse(name: str, workspace: Optional[str | UUID] = None):
+def delete_warehouse(name: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Deletes a Fabric warehouse.
 
@@ -123,27 +134,15 @@ def delete_warehouse(name: str, workspace: Optional[str | UUID] = None):
 
     Parameters
     ----------
-    name: str
-        Name of the warehouse.
+    name: str | uuid.UUID
+        Name or ID of the warehouse.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    item_id = fabric.resolve_item_id(
-        item_name=name, type="Warehouse", workspace=workspace_id
-    )
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/warehouses/{item_id}", method="delete"
-    )
-
-    print(
-        f"{icons.green_dot} The '{name}' warehouse within the '{workspace_name}' workspace has been deleted."
-    )
+    delete_item(item=name, type="Warehouse", workspace=workspace)
 
 
 def get_warehouse_tables(
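
Here create_warehouse now returns the new item's ID, and delete_warehouse accepts a name or an ID, delegating to the shared delete_item helper. A sketch with placeholder names, assuming the root-level re-exports:

    import sempy_labs as labs

    # As of 0.9.6 the new warehouse's ID is returned.
    warehouse_id = labs.create_warehouse(
        warehouse="DemoWH", case_insensitive_collation=True
    )

    labs.delete_warehouse(name=warehouse_id)  # the name "DemoWH" would work too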
sempy_labs/_workloads.py CHANGED
@@ -6,9 +6,10 @@ from sempy_labs._helper_functions import (
     _base_api,
     _create_dataframe,
 )
+from uuid import UUID
 
 
-def list_workloads(capacity_name: str) -> pd.DataFrame:
+def list_workloads(capacity: str | UUID, **kwargs) -> pd.DataFrame:
     """
     Returns the current state of the specified capacity workloads.
     If a workload is enabled, the percentage of maximum memory that the workload can consume is also returned.
@@ -17,8 +18,8 @@ def list_workloads(capacity_name: str) -> pd.DataFrame:
 
     Parameters
     ----------
-    capacity_name : str
-        The capacity name.
+    capacity : str | uuid.UUID
+        The capacity name or ID.
 
     Returns
     -------
@@ -28,6 +29,12 @@ def list_workloads(capacity_name: str) -> pd.DataFrame:
 
     from sempy_labs._helper_functions import resolve_capacity_id
 
+    if "capacity_name" in kwargs:
+        capacity = kwargs["capacity_name"]
+        print(
+            f"{icons.warning} The 'capacity_name' parameter is deprecated. Please use 'capacity' instead."
+        )
+
     columns = {
         "Workload Name": "string",
         "State": "string",
@@ -35,7 +42,7 @@ def list_workloads(capacity_name: str) -> pd.DataFrame:
     }
     df = _create_dataframe(columns=columns)
 
-    capacity_id = resolve_capacity_id(capacity_name=capacity_name)
+    capacity_id = resolve_capacity_id(capacity=capacity)
 
     response = _base_api(request=f"/v1.0/myorg/capacities/{capacity_id}/Workloads")
 
@@ -53,10 +60,11 @@ def list_workloads(capacity_name: str) -> pd.DataFrame:
 
 
 def patch_workload(
-    capacity_name: str,
+    capacity: str | UUID,
     workload_name: str,
     state: Optional[str] = None,
     max_memory_percentage: Optional[int] = None,
+    **kwargs,
 ):
     """
     Changes the state of a specific workload to Enabled or Disabled.
@@ -66,8 +74,8 @@ def patch_workload(
 
     Parameters
     ----------
-    capacity_name : str
-        The capacity name.
+    capacity : str | uuid.UUID
+        The capacity name or ID.
     workload_name : str
         The workload name.
     state : str, default=None
@@ -78,7 +86,13 @@ def patch_workload(
 
     from sempy_labs._helper_functions import resolve_capacity_id
 
-    capacity_id = resolve_capacity_id(capacity_name=capacity_name)
+    if "capacity_name" in kwargs:
+        capacity = kwargs["capacity_name"]
+        print(
+            f"{icons.warning} The 'capacity_name' parameter is deprecated. Please use 'capacity' instead."
+        )
+
+    capacity_id = resolve_capacity_id(capacity=capacity)
 
     states = ["Disabled", "Enabled", "Unsupported"]
     state = state.capitalize()
@@ -119,5 +133,5 @@ def patch_workload(
     _base_api(request=url, method="patch", payload=payload)
 
     print(
-        f"The '{workload_name}' workload within the '{capacity_name}' capacity has been updated accordingly."
+        f"The '{workload_name}' workload within the '{capacity}' capacity has been updated accordingly."
    )
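
Both workload functions rename capacity_name to capacity (which now also accepts a capacity ID) and keep a **kwargs shim that maps the old keyword through with a deprecation warning, so existing callers keep working. "MyCapacity" and "Dataflows" below are placeholders:

    import sempy_labs as labs

    df = labs.list_workloads(capacity="MyCapacity")       # new keyword
    df = labs.list_workloads(capacity_name="MyCapacity")  # still accepted; prints a deprecation warning

    labs.patch_workload(capacity="MyCapacity", workload_name="Dataflows", state="Enabled")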
sempy_labs/_workspace_identity.py CHANGED
@@ -13,6 +13,8 @@ def provision_workspace_identity(workspace: Optional[str | UUID] = None):
 
     This is a wrapper function for the following API: `Workspaces - Provision Identity <https://learn.microsoft.com/rest/api/fabric/core/workspaces/provision-identity>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -28,6 +30,7 @@ def provision_workspace_identity(workspace: Optional[str | UUID] = None):
         method="post",
         lro_return_status_code=True,
         status_codes=None,
+        client="fabric_sp",
     )
 
     print(
@@ -41,6 +44,8 @@ def deprovision_workspace_identity(workspace: Optional[str | UUID] = None):
 
     This is a wrapper function for the following API: `Workspaces - Derovision Identity <https://learn.microsoft.com/rest/api/fabric/core/workspaces/deprovision-identity>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -56,6 +61,7 @@ def deprovision_workspace_identity(workspace: Optional[str | UUID] = None):
         method="post",
         lro_return_status_code=True,
         status_codes=None,
+        client="fabric_sp",
     )
 
     print(
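
Both identity functions now route their requests through the fabric_sp client, so they also work when the session is authenticated as a service principal. A sketch, assuming the functions are re-exported from the package root; "Sales Workspace" is a placeholder:

    import sempy_labs as labs

    # Works under user or service principal authentication
    # (see the linked Service Principal notebook for SPN setup).
    labs.provision_workspace_identity(workspace="Sales Workspace")
    labs.deprovision_workspace_identity(workspace="Sales Workspace")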