semantic-link-labs 0.9.3__py3-none-any.whl → 0.9.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.
Files changed (68)
  1. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/METADATA +25 -6
  2. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/RECORD +68 -52
  3. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +45 -4
  5. sempy_labs/_capacities.py +22 -127
  6. sempy_labs/_capacity_migration.py +11 -9
  7. sempy_labs/_dashboards.py +60 -0
  8. sempy_labs/_data_pipelines.py +5 -31
  9. sempy_labs/_dax.py +17 -3
  10. sempy_labs/_delta_analyzer.py +279 -127
  11. sempy_labs/_environments.py +20 -48
  12. sempy_labs/_eventhouses.py +69 -30
  13. sempy_labs/_eventstreams.py +16 -34
  14. sempy_labs/_gateways.py +4 -4
  15. sempy_labs/_generate_semantic_model.py +30 -10
  16. sempy_labs/_git.py +90 -1
  17. sempy_labs/_graphQL.py +3 -20
  18. sempy_labs/_helper_functions.py +201 -44
  19. sempy_labs/_job_scheduler.py +226 -2
  20. sempy_labs/_kql_databases.py +19 -34
  21. sempy_labs/_kql_querysets.py +15 -32
  22. sempy_labs/_list_functions.py +14 -133
  23. sempy_labs/_mirrored_databases.py +14 -48
  24. sempy_labs/_ml_experiments.py +5 -30
  25. sempy_labs/_ml_models.py +4 -28
  26. sempy_labs/_model_bpa.py +17 -0
  27. sempy_labs/_model_bpa_rules.py +12 -2
  28. sempy_labs/_mounted_data_factories.py +119 -0
  29. sempy_labs/_notebooks.py +16 -26
  30. sempy_labs/_semantic_models.py +117 -0
  31. sempy_labs/_sql.py +78 -10
  32. sempy_labs/_sqldatabase.py +227 -0
  33. sempy_labs/_utils.py +42 -0
  34. sempy_labs/_vertipaq.py +17 -2
  35. sempy_labs/_warehouses.py +5 -17
  36. sempy_labs/_workloads.py +23 -9
  37. sempy_labs/_workspaces.py +13 -5
  38. sempy_labs/admin/__init__.py +70 -9
  39. sempy_labs/admin/_activities.py +166 -0
  40. sempy_labs/admin/_apps.py +143 -0
  41. sempy_labs/admin/_artifacts.py +62 -0
  42. sempy_labs/admin/_basic_functions.py +32 -704
  43. sempy_labs/admin/_capacities.py +311 -0
  44. sempy_labs/admin/_datasets.py +184 -0
  45. sempy_labs/admin/_domains.py +1 -1
  46. sempy_labs/admin/_items.py +3 -1
  47. sempy_labs/admin/_reports.py +239 -0
  48. sempy_labs/admin/_scanner.py +0 -1
  49. sempy_labs/admin/_shared.py +76 -0
  50. sempy_labs/admin/_tenant.py +489 -0
  51. sempy_labs/admin/_users.py +133 -0
  52. sempy_labs/admin/_workspaces.py +148 -0
  53. sempy_labs/directlake/_dl_helper.py +0 -1
  54. sempy_labs/directlake/_update_directlake_partition_entity.py +14 -0
  55. sempy_labs/graph/_teams.py +1 -1
  56. sempy_labs/graph/_users.py +9 -1
  57. sempy_labs/lakehouse/__init__.py +2 -0
  58. sempy_labs/lakehouse/_lakehouse.py +6 -7
  59. sempy_labs/lakehouse/_shortcuts.py +216 -64
  60. sempy_labs/report/__init__.py +3 -1
  61. sempy_labs/report/_download_report.py +4 -1
  62. sempy_labs/report/_export_report.py +272 -0
  63. sempy_labs/report/_generate_report.py +9 -17
  64. sempy_labs/report/_report_bpa.py +12 -19
  65. sempy_labs/report/_report_functions.py +9 -261
  66. sempy_labs/tom/_model.py +307 -40
  67. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/LICENSE +0 -0
  68. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/top_level.txt +0 -0
sempy_labs/_mounted_data_factories.py ADDED
@@ -0,0 +1,119 @@
+ import pandas as pd
+ import json
+ from typing import Optional
+ from sempy_labs._helper_functions import (
+     resolve_workspace_name_and_id,
+     _base_api,
+     _create_dataframe,
+     _update_dataframe_datatypes,
+     resolve_item_id,
+     _decode_b64,
+     delete_item,
+     get_item_definition,
+ )
+
+ from uuid import UUID
+
+
+ def list_mounted_data_factories(
+     workspace: Optional[str | UUID] = None,
+ ) -> pd.DataFrame:
+     """
+     Shows a list of mounted data factories from the specified workspace.
+
+     This is a wrapper function for the following API: `Items - List Mounted Data Factories <https://learn.microsoft.com/rest/api/fabric/mounteddatafactory/items/list-mounted-data-factories>`_.
+
+     Parameters
+     ----------
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing a list of mounted data factories from the specified workspace.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     columns = {
+         "Mounted Data Factory Name": "str",
+         "Mounted Data Factory Id": "str",
+         "Description": "str",
+     }
+
+     df = _create_dataframe(columns=columns)
+     responses = _base_api(
+         request=f"/v1/workspaces/{workspace_id}/mountedDataFactories",
+         uses_pagination=True,
+     )
+
+     for r in responses:
+         for v in r.get("value", []):
+             new_data = {
+                 "Mounted Data Factory Name": v.get("displayName"),
+                 "Mounted Data Factory Id": v.get("id"),
+                 "Description": v.get("description"),
+             }
+
+             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+     return df
+
+
+ def get_mounted_data_factory_definition(
+     mounted_data_factory: str | UUID, workspace: Optional[str | UUID] = None
+ ) -> dict:
+     """
+     Returns the specified MountedDataFactory public definition.
+
+     This is a wrapper function for the following API: `Items - Get Mounted Data Factory Definition <https://learn.microsoft.com/rest/api/fabric/mounteddatafactory/items/get-mounted-data-factory-definition>`_.
+
+     Parameters
+     ----------
+     mounted_data_factory : str | uuid.UUID
+         The name or ID of the mounted data factory.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     dict
+         The 'mountedDataFactory-content.json' file from the mounted data factory definition.
+     """
+
+     return get_item_definition(
+         item=mounted_data_factory,
+         type="MountedDataFactory",
+         workspace=workspace,
+         return_dataframe=False,
+     )
+
+
+ def delete_mounted_data_factory(
+     mounted_data_factory: str | UUID, workspace: Optional[str | UUID] = None
+ ):
+     """
+     Deletes the specified mounted data factory.
+
+     This is a wrapper function for the following API: `Items - Delete Mounted Data Factory <https://learn.microsoft.com/rest/api/fabric/mounteddatafactory/items/delete-mounted-data-factory>`_.
+
+     Parameters
+     ----------
+     mounted_data_factory : str | uuid.UUID
+         The name or ID of the mounted data factory.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     delete_item(
+         item=mounted_data_factory, type="MountedDataFactory", workspace=workspace
+     )
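
For orientation, here is a minimal usage sketch of the new module, assuming a Fabric notebook session with semantic-link-labs 0.9.5 installed; the item and workspace names are hypothetical placeholders:

    from sempy_labs._mounted_data_factories import (
        list_mounted_data_factories,
        get_mounted_data_factory_definition,
    )

    # List mounted data factories; workspace=None resolves to the workspace
    # of the attached lakehouse (or of the notebook if none is attached).
    df = list_mounted_data_factories()

    # Fetch the 'mountedDataFactory-content.json' definition for one item.
    definition = get_mounted_data_factory_definition(
        mounted_data_factory="MyMountedDataFactory",  # hypothetical item name
        workspace="My Workspace",  # hypothetical workspace name
    )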
sempy_labs/_notebooks.py CHANGED
@@ -10,6 +10,7 @@ from sempy_labs._helper_functions import (
      _decode_b64,
      _base_api,
      resolve_item_id,
+     create_item,
  )
  from sempy.fabric.exceptions import FabricHTTPException
  import os
@@ -183,35 +184,24 @@ def create_notebook(
          or if no lakehouse attached, resolves to the workspace of the notebook.
      """

-     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
      notebook_payload = base64.b64encode(notebook_content).decode("utf-8")
-
-     payload = {
-         "displayName": name,
-         "definition": {
-             "format": "ipynb",
-             "parts": [
-                 {
-                     "path": f"{_notebook_prefix}.{type}",
-                     "payload": notebook_payload,
-                     "payloadType": "InlineBase64",
-                 }
-             ],
-         },
+     definition_payload = {
+         "format": "ipynb",
+         "parts": [
+             {
+                 "path": f"{_notebook_prefix}.{type}",
+                 "payload": notebook_payload,
+                 "payloadType": "InlineBase64",
+             }
+         ],
      }
-     if description is not None:
-         payload["description"] = description

-     _base_api(
-         request=f"v1/workspaces/{workspace_id}/notebooks",
-         payload=payload,
-         method="post",
-         lro_return_status_code=True,
-         status_codes=[201, 202],
-     )
-
-     print(
-         f"{icons.green_dot} The '{name}' notebook was created within the '{workspace_name}' workspace."
+     create_item(
+         name=name,
+         type="Notebook",
+         workspace=workspace,
+         description=description,
+         definition=definition_payload,
      )

sempy_labs/_semantic_models.py ADDED
@@ -0,0 +1,117 @@
+ from uuid import UUID
+ from typing import Optional
+ import pandas as pd
+ from sempy_labs._helper_functions import (
+     _create_dataframe,
+     _base_api,
+     _update_dataframe_datatypes,
+     resolve_workspace_name_and_id,
+     resolve_dataset_name_and_id,
+ )
+ import sempy_labs._icons as icons
+
+
+ def get_semantic_model_refresh_schedule(
+     dataset: str | UUID, workspace: Optional[str | UUID] = None
+ ) -> pd.DataFrame:
+     """
+     Gets the refresh schedule for the specified dataset from the specified workspace.
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     workspace : str | uuid.UUID, default=None
+         The workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         Shows the refresh schedule for the specified dataset from the specified workspace.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace)
+
+     columns = {
+         "Days": "str",
+         "Times": "str",
+         "Enabled": "bool",
+         "Local Time Zone Id": "str",
+         "Notify Option": "str",
+     }
+
+     column_map = {
+         "days": "Days",
+         "times": "Times",
+         "enabled": "Enabled",
+         "localTimeZoneId": "Local Time Zone Id",
+         "notifyOption": "Notify Option",
+     }
+
+     df = _create_dataframe(columns)
+
+     result = _base_api(
+         request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshSchedule"
+     ).json()
+
+     df = (
+         pd.json_normalize(result)
+         .drop(columns=["@odata.context"], errors="ignore")
+         .rename(columns=column_map)
+     )
+
+     _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+     return df
+
+
+ def enable_semantic_model_scheduled_refresh(
+     dataset: str | UUID,
+     workspace: Optional[str | UUID] = None,
+     enable: bool = True,
+ ):
+     """
+     Enables the scheduled refresh for the specified dataset from the specified workspace.
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     workspace : str | uuid.UUID, default=None
+         The workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     enable : bool, default=True
+         If True, enables the scheduled refresh.
+         If False, disables the scheduled refresh.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace)
+
+     df = get_semantic_model_refresh_schedule(dataset=dataset, workspace=workspace)
+     status = df["Enabled"].iloc[0]
+
+     if enable and status:
+         print(
+             f"{icons.info} Scheduled refresh for the '{dataset_name}' within the '{workspace_name}' workspace is already enabled."
+         )
+     elif not enable and not status:
+         print(
+             f"{icons.info} Scheduled refresh for the '{dataset_name}' within the '{workspace_name}' workspace is already disabled."
+         )
+     else:
+         payload = {"value": {"enabled": enable}}
+
+         _base_api(
+             request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshSchedule",
+             method="patch",
+             payload=payload,
+         )
+
+         print(
+             f"{icons.green_dot} Scheduled refresh for the '{dataset_name}' within the '{workspace_name}' workspace has been {'enabled' if enable else 'disabled'}."
+         )
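
Both functions wrap the Power BI v1.0 refreshSchedule endpoints (GET to read, PATCH to toggle). A short usage sketch with placeholder names:

    from sempy_labs._semantic_models import (
        get_semantic_model_refresh_schedule,
        enable_semantic_model_scheduled_refresh,
    )

    # Read the current schedule as a one-row dataframe.
    schedule = get_semantic_model_refresh_schedule(
        dataset="Sales Model", workspace="My Workspace"  # placeholders
    )
    print(schedule[["Days", "Times", "Enabled"]])

    # Disable scheduled refresh; the function prints an info message and
    # skips the PATCH when the schedule is already in the requested state.
    enable_semantic_model_scheduled_refresh(
        dataset="Sales Model", workspace="My Workspace", enable=False
    )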
sempy_labs/_sql.py CHANGED
@@ -34,7 +34,7 @@ def _bytes2mswin_bstr(value: bytes) -> bytes:
  class ConnectBase:
      def __init__(
          self,
-         item: str,
+         item: str | UUID,
          workspace: Optional[Union[str, UUID]] = None,
          timeout: Optional[int] = None,
          endpoint_type: str = "warehouse",
@@ -45,13 +45,19 @@ class ConnectBase:
          (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

          # Resolve the appropriate ID and name (warehouse or lakehouse)
-         if endpoint_type == "warehouse":
+         if endpoint_type == "sqldatabase":
+             # SQLDatabase is a special case for resolving the name and ID
              (resource_name, resource_id) = resolve_item_name_and_id(
-                 item=item, type=endpoint_type.capitalize(), workspace=workspace_id
+                 item=item, type="SQLDatabase", workspace=workspace_id
              )
-         else:
+         elif endpoint_type == "lakehouse":
              (resource_name, resource_id) = resolve_lakehouse_name_and_id(
-                 lakehouse=item, workspace=workspace_id
+                 lakehouse=item,
+                 workspace=workspace_id,
+             )
+         else:
+             (resource_name, resource_id) = resolve_item_name_and_id(
+                 item=item, workspace=workspace_id, type=endpoint_type.capitalize()
              )

          # Get the TDS endpoint
@@ -61,6 +67,8 @@ class ConnectBase:
          if endpoint_type == "warehouse":
              tds_endpoint = response.json().get("properties", {}).get("connectionString")
+         elif endpoint_type == "sqldatabase":
+             tds_endpoint = response.json().get("properties", {}).get("serverFqdn")
          else:
              tds_endpoint = (
                  response.json()
@@ -72,7 +80,10 @@ class ConnectBase:
          # Set up the connection string
          access_token = SynapseTokenProvider()()
          tokenstruct = _bytes2mswin_bstr(access_token.encode())
-         conn_str = f"DRIVER={{ODBC Driver 18 for SQL Server}};SERVER={tds_endpoint};DATABASE={resource_name};Encrypt=Yes;"
+         if endpoint_type == "sqldatabase":
+             conn_str = f"DRIVER={{ODBC Driver 18 for SQL Server}};SERVER={tds_endpoint};DATABASE={resource_name}-{resource_id};Encrypt=Yes;"
+         else:
+             conn_str = f"DRIVER={{ODBC Driver 18 for SQL Server}};SERVER={tds_endpoint};DATABASE={resource_name};Encrypt=Yes;"

          if timeout is not None:
              conn_str += f"Connect Timeout={timeout};"
@@ -141,10 +152,24 @@
  class ConnectWarehouse(ConnectBase):
      def __init__(
          self,
-         warehouse: str,
+         warehouse: str | UUID,
          workspace: Optional[Union[str, UUID]] = None,
-         timeout: Optional[int] = None,
+         timeout: int = 30,
      ):
+         """
+         Run a SQL or T-SQL query against a Fabric warehouse.
+
+         Parameters
+         ----------
+         warehouse : str | uuid.UUID
+             The name or ID of the Fabric warehouse.
+         workspace : str | uuid.UUID, default=None
+             The name or ID of the workspace.
+             Defaults to None which resolves to the workspace of the attached lakehouse
+             or if no lakehouse attached, resolves to the workspace of the notebook.
+         timeout : int, default=30
+             The timeout for the connection in seconds.
+         """
          super().__init__(
              item=warehouse,
              workspace=workspace,
@@ -156,13 +181,56 @@
  class ConnectLakehouse(ConnectBase):
      def __init__(
          self,
-         lakehouse: str,
+         lakehouse: str | UUID,
          workspace: Optional[Union[str, UUID]] = None,
-         timeout: Optional[int] = None,
+         timeout: int = 30,
      ):
+         """
+         Run a SQL or T-SQL query against a Fabric lakehouse.
+
+         Parameters
+         ----------
+         lakehouse : str | uuid.UUID
+             The name or ID of the Fabric lakehouse.
+         workspace : str | uuid.UUID, default=None
+             The name or ID of the workspace.
+             Defaults to None which resolves to the workspace of the attached lakehouse
+             or if no lakehouse attached, resolves to the workspace of the notebook.
+         timeout : int, default=30
+             The timeout for the connection in seconds.
+         """
          super().__init__(
              item=lakehouse,
              workspace=workspace,
              timeout=timeout,
              endpoint_type="lakehouse",
          )
+
+
+ class ConnectSQLDatabase(ConnectBase):
+     def __init__(
+         self,
+         sql_database: str | UUID,
+         workspace: Optional[Union[str, UUID]] = None,
+         timeout: int = 30,
+     ):
+         """
+         Run a SQL or T-SQL query against a Fabric SQL database.
+
+         Parameters
+         ----------
+         sql_database : str | uuid.UUID
+             The name or ID of the Fabric SQL database.
+         workspace : str | uuid.UUID, default=None
+             The name or ID of the workspace.
+             Defaults to None which resolves to the workspace of the attached lakehouse
+             or if no lakehouse attached, resolves to the workspace of the notebook.
+         timeout : int, default=30
+             The timeout for the connection in seconds.
+         """
+         super().__init__(
+             item=sql_database,
+             workspace=workspace,
+             timeout=timeout,
+             endpoint_type="sqldatabase",
+         )
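
ConnectSQLDatabase reuses the ConnectBase machinery with two database-specific twists visible above: the TDS endpoint comes from the serverFqdn property rather than a connection string, and the ODBC DATABASE field becomes '{name}-{id}' instead of the bare item name. A usage sketch under the library's existing context-manager pattern (names are placeholders):

    from sempy_labs._sql import ConnectSQLDatabase, ConnectWarehouse

    # SQL database: DATABASE resolves to "<name>-<id>" under the hood.
    with ConnectSQLDatabase(sql_database="MyDatabase", workspace="My Workspace") as sql:
        df = sql.query("SELECT COUNT(*) AS [Tables] FROM INFORMATION_SCHEMA.TABLES")

    # Warehouse: same pattern, now with a 30-second default timeout.
    with ConnectWarehouse(warehouse="MyWarehouse", workspace="My Workspace") as sql:
        df = sql.query("SELECT TOP 5 * FROM INFORMATION_SCHEMA.TABLES")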
sempy_labs/_sqldatabase.py ADDED
@@ -0,0 +1,227 @@
+ import sempy.fabric as fabric
+ from sempy_labs._helper_functions import (
+     resolve_workspace_name_and_id,
+     _base_api,
+     _create_dataframe,
+     _update_dataframe_datatypes,
+ )
+ import pandas as pd
+ from typing import Optional
+ import sempy_labs._icons as icons
+ from uuid import UUID
+
+ ## Still debugging the creation of a SQL database
+ # def create_warehouse(
+ #     warehouse: str,
+ #     description: Optional[str] = None,
+ #     case_insensitive_collation: bool = False,
+ #     workspace: Optional[str | UUID] = None,
+ # ):
+ #     """
+ #     Creates a Fabric warehouse.
+
+ #     This is a wrapper function for the following API: `Items - Create Warehouse <https://learn.microsoft.com/rest/api/fabric/warehouse/items/create-warehouse>`_.
+
+ #     Parameters
+ #     ----------
+ #     warehouse: str
+ #         Name of the warehouse.
+ #     description : str, default=None
+ #         A description of the warehouse.
+ #     case_insensitive_collation: bool, default=False
+ #         If True, creates the warehouse with case-insensitive collation.
+ #     workspace : str | uuid.UUID, default=None
+ #         The Fabric workspace name or ID.
+ #         Defaults to None which resolves to the workspace of the attached lakehouse
+ #         or if no lakehouse attached, resolves to the workspace of the notebook.
+ #     """
+
+ #     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+ #     payload = {"displayName": warehouse}
+
+ #     if description:
+ #         payload["description"] = description
+ #     if case_insensitive_collation:
+ #         payload.setdefault("creationPayload", {})
+ #         payload["creationPayload"][
+ #             "defaultCollation"
+ #         ] = "Latin1_General_100_CI_AS_KS_WS_SC_UTF8"
+
+ #     _base_api(
+ #         request=f"/v1/workspaces/{workspace_id}/warehouses",
+ #         payload=payload,
+ #         method="post",
+ #         lro_return_status_code=True,
+ #         status_codes=[201, 202],
+ #     )
+
+ #     print(
+ #         f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace_name}' workspace."
+ #     )
+
+
+ def _list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+     """
+     Shows the SQL databases within a workspace.
+
+
+     Parameters
+     ----------
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the SQL databases within a workspace.
+     """
+
+     columns = {
+         "SQL Database Name": "string",
+         "SQL Database Id": "string",
+         "Description": "string",
+         "Connection Type": "string",
+         "Connection Info": "string",
+         "Database Name": "string",
+         "Server FQDN": "string",
+         "Provisioning Status": "string",
+         "Created Date": "datetime",
+         "Last Updated Time UTC": "datetime",
+     }
+     df = _create_dataframe(columns=columns)
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     responses = _base_api(
+         request=f"/v1/workspaces/{workspace_id}/sqldatabases", uses_pagination=True
+     )
+
+     for r in responses:
+         for v in r.get("value", []):
+             prop = v.get("properties", {})
+
+             new_data = {
+                 "SQL Database Name": v.get("displayName"),
+                 "SQL Database Id": v.get("id"),
+                 "Description": v.get("description"),
+                 "Connection Type": v.get("type"),
+                 "Connection Info": prop.get("connectionInfo"),
+                 "Database Name": prop.get("databaseName"),
+                 "Server FQDN": prop.get("serverFqdn"),
+                 "Provisioning Status": prop.get("provisioningState"),
+                 "Created Date": prop.get("createdDate"),
+                 "Last Updated Time UTC": prop.get("lastUpdatedTimeUtc"),
+             }
+             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+     return df
+
+
+ ## Still debugging the deletion of a SQL database
+ # def delete_warehouse(name: str, workspace: Optional[str | UUID] = None):
+ #     """
+ #     Deletes a Fabric warehouse.
+
+ #     This is a wrapper function for the following API: `Items - Delete Warehouse <https://learn.microsoft.com/rest/api/fabric/warehouse/items/delete-warehouse>`_.
+
+ #     Parameters
+ #     ----------
+ #     name: str
+ #         Name of the warehouse.
+ #     workspace : str | uuid.UUID, default=None
+ #         The Fabric workspace name or ID.
+ #         Defaults to None which resolves to the workspace of the attached lakehouse
+ #         or if no lakehouse attached, resolves to the workspace of the notebook.
+ #     """
+
+ #     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+ #     item_id = fabric.resolve_item_id(
+ #         item_name=name, type="Warehouse", workspace=workspace_id
+ #     )
+
+ #     _base_api(
+ #         request=f"/v1/workspaces/{workspace_id}/warehouses/{item_id}", method="delete"
+ #     )
+
+ #     print(
+ #         f"{icons.green_dot} The '{name}' warehouse within the '{workspace_name}' workspace has been deleted."
+ #     )
+
+
+ def get_sql_database_tables(
+     sql_database: str | UUID, workspace: Optional[str | UUID] = None
+ ) -> pd.DataFrame:
+     """
+     Shows a list of the tables in the Fabric SQL database. This function is based on INFORMATION_SCHEMA.TABLES.
+
+     Parameters
+     ----------
+     sql_database : str | uuid.UUID
+         Name or ID of the Fabric SQL database.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing a list of the tables in the Fabric SQL database.
+     """
+
+     from sempy_labs._sql import ConnectSQLDatabase
+
+     with ConnectSQLDatabase(sql_database=sql_database, workspace=workspace) as sql:
+         df = sql.query(
+             """
+             SELECT TABLE_SCHEMA AS [Schema], TABLE_NAME AS [Table Name], TABLE_TYPE AS [Table Type]
+             FROM INFORMATION_SCHEMA.TABLES
+             WHERE TABLE_TYPE = 'BASE TABLE'
+             """
+         )
+
+     return df
+
+
+ def get_sql_database_columns(
+     sql_database: str | UUID, workspace: Optional[str | UUID] = None
+ ) -> pd.DataFrame:
+     """
+     Shows a list of the columns in each table within the Fabric SQL database. This function is based on INFORMATION_SCHEMA.COLUMNS.
+
+     Parameters
+     ----------
+     sql_database : str | uuid.UUID
+         Name or ID of the Fabric SQL database.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing a list of the columns in each table within the Fabric SQL database.
+     """
+
+     from sempy_labs._sql import ConnectSQLDatabase
+
+     with ConnectSQLDatabase(sql_database=sql_database, workspace=workspace) as sql:
+         df = sql.query(
+             """
+             SELECT t.TABLE_SCHEMA AS [Schema], t.TABLE_NAME AS [Table Name], c.COLUMN_NAME AS [Column Name], c.DATA_TYPE AS [Data Type], c.IS_NULLABLE AS [Is Nullable], c.CHARACTER_MAXIMUM_LENGTH AS [Character Max Length]
+             FROM INFORMATION_SCHEMA.TABLES AS t
+             LEFT JOIN INFORMATION_SCHEMA.COLUMNS AS c
+                 ON t.TABLE_NAME = c.TABLE_NAME
+                 AND t.TABLE_SCHEMA = c.TABLE_SCHEMA
+             WHERE t.TABLE_TYPE = 'BASE TABLE'
+             """
+         )
+
+     return df
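
Taken together, these helpers allow schema inspection of a Fabric SQL database without hand-writing a connection string. A closing sketch with placeholder names (note that _list_sql_databases is underscore-prefixed and therefore nominally private):

    from sempy_labs._sqldatabase import (
        _list_sql_databases,
        get_sql_database_tables,
        get_sql_database_columns,
    )

    # Enumerate the SQL databases in a workspace, then inspect one of them.
    dbs = _list_sql_databases(workspace="My Workspace")  # placeholder workspace
    tables = get_sql_database_tables("MyDatabase", "My Workspace")
    columns = get_sql_database_columns("MyDatabase", "My Workspace")
    print(tables.head())
    print(columns[columns["Schema"] == "dbo"].head())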