semantic-link-labs 0.9.5__py3-none-any.whl → 0.9.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (48)
  1. {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/METADATA +3 -2
  2. {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/RECORD +48 -47
  3. {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +15 -1
  5. sempy_labs/_capacities.py +37 -1
  6. sempy_labs/_capacity_migration.py +11 -14
  7. sempy_labs/_connections.py +2 -4
  8. sempy_labs/_dataflows.py +2 -2
  9. sempy_labs/_dax_query_view.py +55 -0
  10. sempy_labs/_delta_analyzer.py +16 -14
  11. sempy_labs/_environments.py +8 -1
  12. sempy_labs/_eventhouses.py +5 -1
  13. sempy_labs/_external_data_shares.py +4 -10
  14. sempy_labs/_generate_semantic_model.py +2 -1
  15. sempy_labs/_graphQL.py +5 -1
  16. sempy_labs/_helper_functions.py +272 -51
  17. sempy_labs/_kql_databases.py +5 -1
  18. sempy_labs/_list_functions.py +5 -37
  19. sempy_labs/_managed_private_endpoints.py +9 -2
  20. sempy_labs/_mirrored_databases.py +3 -1
  21. sempy_labs/_ml_experiments.py +1 -1
  22. sempy_labs/_model_bpa.py +2 -11
  23. sempy_labs/_model_bpa_bulk.py +23 -27
  24. sempy_labs/_one_lake_integration.py +2 -1
  25. sempy_labs/_semantic_models.py +20 -0
  26. sempy_labs/_sql.py +6 -2
  27. sempy_labs/_sqldatabase.py +61 -100
  28. sempy_labs/_vertipaq.py +8 -11
  29. sempy_labs/_warehouses.py +14 -3
  30. sempy_labs/_workspace_identity.py +6 -0
  31. sempy_labs/_workspaces.py +42 -2
  32. sempy_labs/admin/_basic_functions.py +3 -2
  33. sempy_labs/admin/_scanner.py +2 -2
  34. sempy_labs/directlake/_directlake_schema_compare.py +2 -1
  35. sempy_labs/directlake/_directlake_schema_sync.py +65 -19
  36. sempy_labs/directlake/_dl_helper.py +0 -6
  37. sempy_labs/directlake/_generate_shared_expression.py +10 -11
  38. sempy_labs/directlake/_guardrails.py +2 -1
  39. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +12 -25
  40. sempy_labs/directlake/_update_directlake_partition_entity.py +2 -2
  41. sempy_labs/lakehouse/_shortcuts.py +7 -5
  42. sempy_labs/migration/_migration_validation.py +0 -4
  43. sempy_labs/report/_download_report.py +4 -6
  44. sempy_labs/report/_generate_report.py +6 -6
  45. sempy_labs/report/_report_functions.py +2 -1
  46. sempy_labs/report/_report_rebind.py +8 -6
  47. {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/LICENSE +0 -0
  48. {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/top_level.txt +0 -0
sempy_labs/_managed_private_endpoints.py CHANGED
@@ -23,6 +23,8 @@ def create_managed_private_endpoint(
 
     This is a wrapper function for the following API: `Managed Private Endpoints - Create Workspace Managed Private Endpoint <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/create-workspace-managed-private-endpoint>`.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name: str
@@ -60,6 +62,7 @@ def create_managed_private_endpoint(
         status_codes=[201, 202],
         payload=request_body,
         lro_return_status_code=True,
+        client="fabric_sp",
     )
     _print_success(
         item_name=name,
@@ -77,6 +80,8 @@ def list_managed_private_endpoints(
 
     This is a wrapper function for the following API: `Managed Private Endpoints - List Workspace Managed Private Endpoints <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/list-workspace-managed-private-endpoints>`.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -106,7 +111,7 @@ def list_managed_private_endpoints(
     responses = _base_api(
         request=f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints",
         uses_pagination=True,
-        status_codes=200,
+        client="fabric_sp",
     )
 
     for r in responses:
@@ -134,6 +139,8 @@ def delete_managed_private_endpoint(
 
     This is a wrapper function for the following API: `Managed Private Endpoints - Delete Workspace Managed Private Endpoint <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/delete-workspace-managed-private-endpoint>`.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     managed_private_endpoint: str | uuid.UUID
@@ -162,7 +169,7 @@ def delete_managed_private_endpoint(
     _base_api(
         request=f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints/{item_id}",
         method="delete",
-        status_codes=200,
+        client="fabric_sp",
     )
 
     _print_success(
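
Usage sketch (not part of the diff): with the switch to the "fabric_sp" client, these wrappers also run under Service Principal authentication. The workspace and endpoint names below are placeholders, and the functions are assumed to be exported at the package root as in previous releases.

    import sempy_labs as labs

    # List the managed private endpoints of a workspace.
    df = labs.list_managed_private_endpoints(workspace="Sales Workspace")
    print(df)

    # Delete one by name (an ID also works).
    labs.delete_managed_private_endpoint(
        managed_private_endpoint="sql-mpe-01",
        workspace="Sales Workspace",
    )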
sempy_labs/_mirrored_databases.py CHANGED
@@ -22,6 +22,8 @@ def list_mirrored_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
 
     This is a wrapper function for the following API: `Items - List Mirrored Databases <https://learn.microsoft.com/rest/api/fabric/mirroredwarehouse/items/list-mirrored-databases>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -50,8 +52,8 @@ def list_mirrored_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     responses = _base_api(
         request=f"/v1/workspaces/{workspace_id}/mirroredDatabases",
-        status_codes=200,
         uses_pagination=True,
+        client="fabric_sp",
     )
 
     for r in responses:
sempy_labs/_ml_experiments.py CHANGED
@@ -73,7 +73,7 @@ def create_ml_experiment(
     name: str
         Name of the ML experiment.
     description : str, default=None
-        A description of the environment.
+        A description of the ML experiment.
     workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
sempy_labs/_model_bpa.py CHANGED
@@ -6,7 +6,6 @@ from IPython.display import display, HTML
 from sempy_labs._model_dependencies import get_model_calc_dependencies
 from sempy_labs._helper_functions import (
     format_dax_object_name,
-    resolve_lakehouse_name,
     create_relationship_name,
     save_as_delta_table,
     resolve_workspace_capacity,
@@ -389,13 +388,7 @@ def run_model_bpa(
         dfExport = finalDF.copy()
         delta_table_name = "modelbparesults"
 
-        lakehouse_id = fabric.get_lakehouse_id()
-        lake_workspace = fabric.get_workspace_id()
-        lakehouse = resolve_lakehouse_name(
-            lakehouse_id=lakehouse_id, workspace=lake_workspace
-        )
-
-        lakeT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lake_workspace)
+        lakeT = get_lakehouse_tables()
         lakeT_filt = lakeT[lakeT["Table Name"] == delta_table_name]
 
         dfExport["Severity"].replace(icons.severity_mapping, inplace=True)
@@ -403,9 +396,7 @@ def run_model_bpa(
         if len(lakeT_filt) == 0:
             runId = 1
         else:
-            max_run_id = _get_column_aggregate(
-                lakehouse=lakehouse, table_name=delta_table_name
-            )
+            max_run_id = _get_column_aggregate(table_name=delta_table_name)
             runId = max_run_id + 1
 
         now = datetime.datetime.now()
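
Usage sketch (not part of the diff): both hunks drop the explicit lakehouse resolution and rely on the helpers' own defaults, i.e. the lakehouse attached to the notebook. A rough standalone restatement of the run-id bookkeeping, using the same calls as the diff; _get_column_aggregate is a private helper and is shown here only for illustration.

    from sempy_labs._helper_functions import _get_column_aggregate
    from sempy_labs.lakehouse import get_lakehouse_tables

    delta_table_name = "modelbparesults"

    # With no arguments, get_lakehouse_tables targets the notebook's attached lakehouse.
    lakeT = get_lakehouse_tables()
    lakeT_filt = lakeT[lakeT["Table Name"] == delta_table_name]

    if len(lakeT_filt) == 0:
        runId = 1
    else:
        # As used above: the aggregate yields the current maximum run id.
        runId = _get_column_aggregate(table_name=delta_table_name) + 1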
sempy_labs/_model_bpa_bulk.py CHANGED
@@ -7,6 +7,8 @@ from sempy_labs._helper_functions import (
     resolve_workspace_capacity,
     retry,
     _get_column_aggregate,
+    resolve_workspace_id,
+    resolve_lakehouse_name_and_id,
 )
 from sempy_labs.lakehouse import (
     get_lakehouse_tables,
@@ -16,6 +18,7 @@ from sempy_labs._model_bpa import run_model_bpa
 from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID
 
 
 @log
@@ -66,17 +69,12 @@ def run_model_bpa_bulk(
 
     now = datetime.datetime.now()
     output_table = "modelbparesults"
-    lakehouse_workspace = fabric.resolve_workspace_name()
-    lakehouse_id = fabric.get_lakehouse_id()
-    lakehouse = resolve_lakehouse_name(
-        lakehouse_id=lakehouse_id, workspace=lakehouse_workspace
-    )
-    lakeT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lakehouse_workspace)
+    lakeT = get_lakehouse_tables()
     lakeT_filt = lakeT[lakeT["Table Name"] == output_table]
     if len(lakeT_filt) == 0:
         runId = 1
     else:
-        max_run_id = _get_column_aggregate(lakehouse=lakehouse, table_name=output_table)
+        max_run_id = _get_column_aggregate(table_name=output_table)
         runId = max_run_id + 1
 
     if isinstance(workspace, str):
@@ -170,7 +168,7 @@ def run_model_bpa_bulk(
 
             # Append save results individually for each workspace (so as not to create a giant dataframe)
             print(
-                f"{icons.in_progress} Saving the Model BPA results of the '{wksp}' workspace to the '{output_table}' within the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace..."
+                f"{icons.in_progress} Saving the Model BPA results of the '{wksp}' workspace to the '{output_table}' within the lakehouse attached to this notebook..."
            )
 
            schema = {
@@ -195,8 +193,8 @@ def run_model_bpa_bulk(
 @log
 def create_model_bpa_semantic_model(
     dataset: Optional[str] = icons.model_bpa_name,
-    lakehouse: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None,
+    lakehouse: Optional[str | UUID] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
 ):
     """
     Dynamically generates a Direct Lake semantic model based on the 'modelbparesults' delta table which contains the Best Practice Analyzer results.
@@ -209,16 +207,15 @@ def create_model_bpa_semantic_model(
     ----------
     dataset : str, default='ModelBPA'
         Name of the semantic model to be created.
-    lakehouse : str, default=None
+    lakehouse : str | uuid.UUID, default=None
         Name of the Fabric lakehouse which contains the 'modelbparesults' delta table.
         Defaults to None which resolves to the default lakehouse attached to the notebook.
-    lakehouse_workspace : str, default=None
+    lakehouse_workspace : str | uuid.UUID, default=None
         The workspace in which the lakehouse resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    from sempy_labs._helper_functions import resolve_lakehouse_name
     from sempy_labs.directlake import (
         generate_shared_expression,
         add_table_to_direct_lake_semantic_model,
@@ -226,22 +223,21 @@ def create_model_bpa_semantic_model(
     from sempy_labs import create_blank_semantic_model, refresh_semantic_model
     from sempy_labs.tom import connect_semantic_model
 
-    lakehouse_workspace = fabric.resolve_workspace_name(lakehouse_workspace)
-
-    if lakehouse is None:
-        lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(
-            lakehouse_id=lakehouse_id, workspace=lakehouse_workspace
-        )
+    lakehouse_workspace_id = resolve_workspace_id(workspace=lakehouse_workspace)
+    (lakehouse_id, lakehouse_name) = resolve_lakehouse_name_and_id(
+        lakehouse=lakehouse, workspace=lakehouse_workspace_id
+    )
 
     # Generate the shared expression based on the lakehouse and lakehouse workspace
     expr = generate_shared_expression(
-        item_name=lakehouse, item_type="Lakehouse", workspace=lakehouse_workspace
+        item_name=lakehouse_name,
+        item_type="Lakehouse",
+        workspace=lakehouse_workspace_id,
     )
 
     # Create blank model
     create_blank_semantic_model(
-        dataset=dataset, workspace=lakehouse_workspace, overwrite=True
+        dataset=dataset, workspace=lakehouse_workspace_id, overwrite=True
     )
 
     @retry(
@@ -250,7 +246,7 @@ def create_model_bpa_semantic_model(
     )
     def dyn_connect():
         with connect_semantic_model(
-            dataset=dataset, readonly=True, workspace=lakehouse_workspace
+            dataset=dataset, readonly=True, workspace=lakehouse_workspace_id
         ) as tom:
 
             tom.model
@@ -259,7 +255,7 @@ def create_model_bpa_semantic_model(
     icons.sll_tags.append("ModelBPABulk")
     table_exists = False
     with connect_semantic_model(
-        dataset=dataset, readonly=False, workspace=lakehouse_workspace
+        dataset=dataset, readonly=False, workspace=lakehouse_workspace_id
     ) as tom:
         t_name = "BPAResults"
         t_name_full = f"'{t_name}'"
@@ -274,11 +270,11 @@ def create_model_bpa_semantic_model(
             dataset=dataset,
             table_name=t_name,
             lakehouse_table_name="modelbparesults",
-            workspace=lakehouse_workspace,
+            workspace=lakehouse_workspace_id,
             refresh=False,
         )
     with connect_semantic_model(
-        dataset=dataset, readonly=False, workspace=lakehouse_workspace
+        dataset=dataset, readonly=False, workspace=lakehouse_workspace_id
     ) as tom:
         # Fix column names
         for c in tom.all_columns():
@@ -377,4 +373,4 @@ def create_model_bpa_semantic_model(
     # tom.add_measure(table_name=t_name, measure_name='Rules Followed', expression="[Rules] - [Rules Violated]")
 
     # Refresh the model
-    refresh_semantic_model(dataset=dataset, workspace=lakehouse_workspace)
+    refresh_semantic_model(dataset=dataset, workspace=lakehouse_workspace_id)
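
Usage sketch (not part of the diff): because the signature now accepts str | UUID for both the lakehouse and its workspace, a GUID can be passed directly. The GUID below is a placeholder, and the function is assumed to be exported at the package root as in previous releases.

    import sempy_labs as labs

    labs.create_model_bpa_semantic_model(
        dataset="ModelBPA",
        lakehouse="11111111-2222-3333-4444-555555555555",  # placeholder lakehouse ID
        lakehouse_workspace="Sales Workspace",              # a workspace ID works too
    )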
sempy_labs/_one_lake_integration.py CHANGED
@@ -5,6 +5,7 @@ from sempy._utils._log import log
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
+    resolve_workspace_id,
 )
 import sempy_labs._icons as icons
 from uuid import UUID
@@ -43,7 +44,7 @@ def export_model_to_onelake(
         destination_workspace = workspace_name
         destination_workspace_id = workspace_id
     else:
-        destination_workspace_id = fabric.resolve_workspace_id(destination_workspace)
+        destination_workspace_id = resolve_workspace_id(workspace=destination_workspace)
 
     tmsl = f"""
     {{
sempy_labs/_semantic_models.py CHANGED
@@ -7,6 +7,7 @@ from sempy_labs._helper_functions import (
     _update_dataframe_datatypes,
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
+    delete_item,
 )
 import sempy_labs._icons as icons
 
@@ -115,3 +116,22 @@ def enable_semantic_model_scheduled_refresh(
     print(
         f"{icons.green_dot} Scheduled refresh for the '{dataset_name}' within the '{workspace_name}' workspace has been enabled."
     )
+
+
+def delete_semantic_model(dataset: str | UUID, workspace: Optional[str | UUID] = None):
+    """
+    Deletes a semantic model.
+
+    This is a wrapper function for the following API: `Items - Delete Semantic Model <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/delete-semantic-model>`_.
+
+    Parameters
+    ----------
+    dataset: str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    delete_item(item=dataset, type="SemanticModel", workspace=workspace)
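
Usage sketch (not part of the diff): a minimal call to the new delete_semantic_model wrapper, assuming it is re-exported from the package root like the other item helpers; the dataset and workspace names are placeholders.

    import sempy_labs as labs

    # Delete by name; a GUID / uuid.UUID also works for `dataset`.
    labs.delete_semantic_model(dataset="ModelBPA", workspace="Sales Workspace")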
sempy_labs/_sql.py CHANGED
@@ -60,14 +60,18 @@ class ConnectBase:
             item=item, workspace=workspace_id, type=endpoint_type.capitalize()
         )
 
+        endpoint_for_url = (
+            "sqlDatabases" if endpoint_type == "sqldatabase" else f"{endpoint_type}s"
+        )
+
         # Get the TDS endpoint
         response = _base_api(
-            request=f"v1/workspaces/{workspace_id}/{endpoint_type}s/{resource_id}"
+            request=f"v1/workspaces/{workspace_id}/{endpoint_for_url}/{resource_id}"
         )
 
         if endpoint_type == "warehouse":
             tds_endpoint = response.json().get("properties", {}).get("connectionString")
-        if endpoint_type == "sqldatabase":
+        elif endpoint_type == "sqldatabase":
             tds_endpoint = response.json().get("properties", {}).get("serverFqdn")
         else:
             tds_endpoint = (
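
Note (not part of the diff): the added mapping exists because the REST path segment for SQL databases is camel-cased while the other endpoint types pluralize with a plain "s". A standalone restatement of that logic:

    def endpoint_segment(endpoint_type: str) -> str:
        # "sqldatabase" -> "sqlDatabases"; "warehouse" -> "warehouses"; "lakehouse" -> "lakehouses"
        return "sqlDatabases" if endpoint_type == "sqldatabase" else f"{endpoint_type}s"

    assert endpoint_segment("sqldatabase") == "sqlDatabases"
    assert endpoint_segment("warehouse") == "warehouses"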
sempy_labs/_sqldatabase.py CHANGED
@@ -1,70 +1,69 @@
-import sempy.fabric as fabric
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
     _update_dataframe_datatypes,
+    create_item,
+    delete_item,
 )
 import pandas as pd
 from typing import Optional
-import sempy_labs._icons as icons
 from uuid import UUID
 
-## Still debugging the creation of an sql database
-# def create_warehouse(
-#     warehouse: str,
-#     description: Optional[str] = None,
-#     case_insensitive_collation: bool = False,
-#     workspace: Optional[str | UUID] = None,
-# ):
-#     """
-#     Creates a Fabric warehouse.
-
-#     This is a wrapper function for the following API: `Items - Create Warehouse <https://learn.microsoft.com/rest/api/fabric/warehouse/items/create-warehouse>`_.
-
-#     Parameters
-#     ----------
-#     warehouse: str
-#         Name of the warehouse.
-#     description : str, default=None
-#         A description of the warehouse.
-#     case_insensitive_collation: bool, default=False
-#         If True, creates the warehouse with case-insensitive collation.
-#     workspace : str | uuid.UUID, default=None
-#         The Fabric workspace name or ID.
-#         Defaults to None which resolves to the workspace of the attached lakehouse
-#         or if no lakehouse attached, resolves to the workspace of the notebook.
-#     """
-
-#     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-#     payload = {"displayName": warehouse}
-
-#     if description:
-#         payload["description"] = description
-#     if case_insensitive_collation:
-#         payload.setdefault("creationPayload", {})
-#         payload["creationPayload"][
-#             "defaultCollation"
-#         ] = "Latin1_General_100_CI_AS_KS_WS_SC_UTF8"
-
-#     _base_api(
-#         request=f"/v1/workspaces/{workspace_id}/warehouses",
-#         payload=payload,
-#         method="post",
-#         lro_return_status_code=True,
-#         status_codes=[201, 202],
-#     )
-
-#     print(
-#         f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace_name}' workspace."
-#     )
-
-
-def _list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+
+def create_sql_database(
+    name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
+):
+    """
+    Creates a SQL database.
+
+    This is a wrapper function for the following API: `Items - Create SQL Database <https://learn.microsoft.com/rest/api/fabric/sqldatabase/items/create-sql-database>`_.
+
+    Parameters
+    ----------
+    name: str
+        Name of the SQL database.
+    description : str, default=None
+        A description of the SQL database.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    create_item(
+        name=name, description=description, type="SQLDatabase", workspace=workspace
+    )
+
+
+def delete_sql_database(
+    sql_database: str | UUID, workspace: Optional[str | UUID] = None
+):
+    """
+    Deletes a SQL Database.
+
+    This is a wrapper function for the following API: `Items - Delete SQL Database <https://learn.microsoft.com/rest/api/fabric/sqldatabase/items/delete-sql-database>`_.
+
+    Parameters
+    ----------
+    sql_database: str | uuid.UUID
+        Name of the SQL database.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
     """
-    Shows the databses within a workspace.
 
+    delete_item(item=sql_database, type="SQLDatabase", workspace=workspace)
+
+
+def list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Lists all SQL databases in the Fabric workspace.
+
+    This is a wrapper function for the following API: `Items - List SQL Databases <https://learn.microsoft.com/rest/api/fabric/sqldatabase/items/list-sql-databases>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
 
     Parameters
     ----------
@@ -76,45 +75,39 @@ def _list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     Returns
     -------
     pandas.DataFrame
-        A pandas dataframe showing the SQLDabatases within a workspace.
+        A pandas dataframe showing a list of SQL databases in the Fabric workspace.
     """
 
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
     columns = {
         "SQL Database Name": "string",
         "SQL Database Id": "string",
         "Description": "string",
-        "Connection Type": "string",
         "Connection Info": "string",
         "Database Name": "string",
         "Server FQDN": "string",
-        "Provisioning Status": "string",
-        "Created Date": "datetime",
-        "Last Updated Time UTC": "datetime",
     }
     df = _create_dataframe(columns=columns)
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
     responses = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/sqldatabases", uses_pagination=True
+        request=f"/v1/workspaces/{workspace_id}/SQLDatabases",
+        uses_pagination=True,
+        client="fabric_sp",
     )
 
     for r in responses:
        for v in r.get("value", []):
            prop = v.get("properties", {})
-
            new_data = {
                "SQL Database Name": v.get("displayName"),
                "SQL Database Id": v.get("id"),
                "Description": v.get("description"),
-                "Connection Type": v.get("type"),
                "Connection Info": prop.get("connectionInfo"),
                "Database Name": prop.get("databaseName"),
                "Server FQDN": prop.get("serverFqdn"),
-                "Provisioning Status": prop.get("provisioningState"),
-                "Created Date": prop.get("createdDate"),
-                "Last Updated Time UTC": prop.get("lastUpdatedTimeUtc"),
            }
+
            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
    _update_dataframe_datatypes(dataframe=df, column_map=columns)
@@ -122,38 +115,6 @@ def _list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     return df
 
 
-## Still debugging the deletion of an sql database
-# def delete_warehouse(name: str, workspace: Optional[str | UUID] = None):
-#     """
-#     Deletes a Fabric warehouse.
-
-#     This is a wrapper function for the following API: `Items - Delete Warehouse <https://learn.microsoft.com/rest/api/fabric/warehouse/items/delete-warehouse>`_.
-
-#     Parameters
-#     ----------
-#     name: str
-#         Name of the warehouse.
-#     workspace : str | uuid.UUID, default=None
-#         The Fabric workspace name or ID.
-#         Defaults to None which resolves to the workspace of the attached lakehouse
-#         or if no lakehouse attached, resolves to the workspace of the notebook.
-#     """
-
-#     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-#     item_id = fabric.resolve_item_id(
-#         item_name=name, type="Warehouse", workspace=workspace_id
-#     )
-
-#     _base_api(
-#         request=f"/v1/workspaces/{workspace_id}/warehouses/{item_id}", method="delete"
-#     )
-
-#     print(
-#         f"{icons.green_dot} The '{name}' warehouse within the '{workspace_name}' workspace has been deleted."
-#     )
-
-
 def get_sql_database_tables(
     sql_database: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
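
Usage sketch (not part of the diff): an end-to-end pass over the new SQL database helpers (create, list, delete), assuming they are exported at the package root; the item and workspace names are placeholders.

    import sempy_labs as labs

    labs.create_sql_database(
        name="SalesDB", description="Demo database", workspace="Sales Workspace"
    )

    df = labs.list_sql_databases(workspace="Sales Workspace")
    print(df[["SQL Database Name", "Server FQDN"]])

    labs.delete_sql_database(sql_database="SalesDB", workspace="Sales Workspace")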
sempy_labs/_vertipaq.py CHANGED
@@ -15,6 +15,8 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     _create_spark_session,
+    resolve_workspace_id,
+    resolve_workspace_name,
 )
 from sempy_labs._list_functions import list_relationships, list_tables
 from sempy_labs.lakehouse import lakehouse_attached, get_lakehouse_tables
@@ -196,8 +198,10 @@ def vertipaq_analyzer(
         & (~dfC["Column Name"].str.startswith("RowNumber-"))
     ]
 
-    object_workspace = fabric.resolve_workspace_name(lakehouse_workspace_id)
-    current_workspace_id = fabric.get_workspace_id()
+    object_workspace = resolve_workspace_name(
+        workspace_id=lakehouse_workspace_id
+    )
+    current_workspace_id = resolve_workspace_id()
     if current_workspace_id != lakehouse_workspace_id:
         lakeTables = get_lakehouse_tables(
             lakehouse=lakehouse_name, workspace=object_workspace
@@ -526,22 +530,15 @@ def vertipaq_analyzer(
     )
 
     if export == "table":
-        lakehouse_id = fabric.get_lakehouse_id()
-        lake_workspace = fabric.resolve_workspace_name()
-        lakehouse = resolve_lakehouse_name(
-            lakehouse_id=lakehouse_id, workspace=lake_workspace
-        )
         lakeTName = "vertipaqanalyzer_model"
 
-        lakeT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lake_workspace)
+        lakeT = get_lakehouse_tables()
         lakeT_filt = lakeT[lakeT["Table Name"] == lakeTName]
 
         if len(lakeT_filt) == 0:
             runId = 1
         else:
-            max_run_id = _get_column_aggregate(
-                lakehouse=lakehouse, table_name=lakeTName
-            )
+            max_run_id = _get_column_aggregate(table_name=lakeTName)
             runId = max_run_id + 1
 
         dfMap = {
sempy_labs/_warehouses.py CHANGED
@@ -16,7 +16,7 @@ def create_warehouse(
     description: Optional[str] = None,
     case_insensitive_collation: bool = False,
     workspace: Optional[str | UUID] = None,
-):
+) -> UUID:
     """
     Creates a Fabric warehouse.
 
@@ -34,6 +34,11 @@ def create_warehouse(
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    uuid.UUID
+        The ID of the created warehouse.
     """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -48,7 +53,7 @@ def create_warehouse(
             "defaultCollation"
         ] = "Latin1_General_100_CI_AS_KS_WS_SC_UTF8"
 
-    _base_api(
+    response = _base_api(
         request=f"/v1/workspaces/{workspace_id}/warehouses",
         payload=payload,
         method="post",
@@ -60,6 +65,8 @@ def create_warehouse(
         f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace_name}' workspace."
     )
 
+    return response.get("id")
+
 
 def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
@@ -67,6 +74,8 @@ def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
 
     This is a wrapper function for the following API: `Items - List Warehouses <https://learn.microsoft.com/rest/api/fabric/warehouse/items/list-warehouses>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -93,7 +102,9 @@ def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     responses = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/warehouses", uses_pagination=True
+        request=f"/v1/workspaces/{workspace_id}/warehouses",
+        uses_pagination=True,
+        client="fabric_sp",
     )
 
     for r in responses:
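
Usage sketch (not part of the diff): since create_warehouse now returns the new item's ID, callers can capture it directly instead of listing warehouses afterwards. Names below are placeholders.

    import sempy_labs as labs

    warehouse_id = labs.create_warehouse(
        warehouse="SalesWH",
        case_insensitive_collation=True,
        workspace="Sales Workspace",
    )
    print(f"Created warehouse: {warehouse_id}")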