semantic-link-labs 0.10.1-py3-none-any.whl → 0.11.0-py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- {semantic_link_labs-0.10.1.dist-info → semantic_link_labs-0.11.0.dist-info}/METADATA +6 -5
- {semantic_link_labs-0.10.1.dist-info → semantic_link_labs-0.11.0.dist-info}/RECORD +94 -92
- sempy_labs/__init__.py +4 -0
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_capacities.py +2 -0
- sempy_labs/_connections.py +11 -0
- sempy_labs/_dashboards.py +9 -4
- sempy_labs/_data_pipelines.py +5 -0
- sempy_labs/_dataflows.py +284 -17
- sempy_labs/_daxformatter.py +2 -0
- sempy_labs/_delta_analyzer_history.py +4 -1
- sempy_labs/_deployment_pipelines.py +4 -0
- sempy_labs/_documentation.py +3 -0
- sempy_labs/_environments.py +10 -1
- sempy_labs/_eventhouses.py +12 -5
- sempy_labs/_eventstreams.py +11 -3
- sempy_labs/_external_data_shares.py +8 -2
- sempy_labs/_gateways.py +26 -5
- sempy_labs/_git.py +11 -0
- sempy_labs/_graphQL.py +10 -3
- sempy_labs/_helper_functions.py +62 -10
- sempy_labs/_job_scheduler.py +54 -7
- sempy_labs/_kql_databases.py +11 -2
- sempy_labs/_kql_querysets.py +11 -3
- sempy_labs/_list_functions.py +17 -2
- sempy_labs/_managed_private_endpoints.py +11 -2
- sempy_labs/_mirrored_databases.py +17 -3
- sempy_labs/_mirrored_warehouses.py +9 -3
- sempy_labs/_ml_experiments.py +11 -3
- sempy_labs/_ml_models.py +11 -3
- sempy_labs/_model_bpa_rules.py +2 -0
- sempy_labs/_mounted_data_factories.py +12 -8
- sempy_labs/_notebooks.py +3 -0
- sempy_labs/_refresh_semantic_model.py +1 -0
- sempy_labs/_semantic_models.py +6 -0
- sempy_labs/_spark.py +7 -0
- sempy_labs/_sql_endpoints.py +54 -31
- sempy_labs/_sqldatabase.py +13 -4
- sempy_labs/_tags.py +5 -1
- sempy_labs/_user_delegation_key.py +2 -0
- sempy_labs/_variable_libraries.py +3 -1
- sempy_labs/_warehouses.py +13 -3
- sempy_labs/_workloads.py +3 -0
- sempy_labs/_workspace_identity.py +3 -0
- sempy_labs/_workspaces.py +14 -1
- sempy_labs/admin/__init__.py +2 -0
- sempy_labs/admin/_activities.py +6 -5
- sempy_labs/admin/_apps.py +31 -31
- sempy_labs/admin/_artifacts.py +8 -3
- sempy_labs/admin/_basic_functions.py +5 -0
- sempy_labs/admin/_capacities.py +39 -28
- sempy_labs/admin/_datasets.py +51 -51
- sempy_labs/admin/_domains.py +17 -1
- sempy_labs/admin/_external_data_share.py +8 -2
- sempy_labs/admin/_git.py +14 -9
- sempy_labs/admin/_items.py +15 -2
- sempy_labs/admin/_reports.py +64 -65
- sempy_labs/admin/_shared.py +7 -1
- sempy_labs/admin/_tags.py +5 -0
- sempy_labs/admin/_tenant.py +5 -2
- sempy_labs/admin/_users.py +9 -3
- sempy_labs/admin/_workspaces.py +88 -0
- sempy_labs/directlake/_dl_helper.py +2 -0
- sempy_labs/directlake/_generate_shared_expression.py +2 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +2 -4
- sempy_labs/directlake/_get_shared_expression.py +2 -0
- sempy_labs/directlake/_guardrails.py +2 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +2 -0
- sempy_labs/directlake/_warm_cache.py +1 -0
- sempy_labs/graph/_groups.py +22 -7
- sempy_labs/graph/_teams.py +7 -2
- sempy_labs/graph/_users.py +1 -0
- sempy_labs/lakehouse/_blobs.py +1 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +88 -27
- sempy_labs/lakehouse/_helper.py +2 -0
- sempy_labs/lakehouse/_lakehouse.py +38 -5
- sempy_labs/lakehouse/_livy_sessions.py +2 -1
- sempy_labs/lakehouse/_shortcuts.py +7 -1
- sempy_labs/migration/_direct_lake_to_import.py +2 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +4 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +2 -0
- sempy_labs/report/_download_report.py +2 -1
- sempy_labs/report/_generate_report.py +2 -0
- sempy_labs/report/_paginated.py +2 -0
- sempy_labs/report/_report_bpa.py +110 -122
- sempy_labs/report/_report_bpa_rules.py +2 -0
- sempy_labs/report/_report_functions.py +7 -0
- sempy_labs/report/_reportwrapper.py +64 -31
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +96 -0
- sempy_labs/tom/_model.py +509 -34
- {semantic_link_labs-0.10.1.dist-info → semantic_link_labs-0.11.0.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.10.1.dist-info → semantic_link_labs-0.11.0.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.10.1.dist-info → semantic_link_labs-0.11.0.dist-info}/top_level.txt +0 -0
sempy_labs/_mounted_data_factories.py
CHANGED
```diff
@@ -1,20 +1,19 @@
 import pandas as pd
-import json
 from typing import Optional
 from sempy_labs._helper_functions import (
-
+    resolve_workspace_id,
     _base_api,
     _create_dataframe,
     _update_dataframe_datatypes,
-    resolve_item_id,
-    _decode_b64,
-    delete_item,
     get_item_definition,
+    delete_item,
 )
 
 from uuid import UUID
+from sempy._utils._log import log
 
 
+@log
 def list_mounted_data_factories(
     workspace: Optional[str | UUID] = None,
 ) -> pd.DataFrame:
@@ -36,7 +35,7 @@ def list_mounted_data_factories(
         A pandas dataframe showing a list of mounted data factories from the specified workspace.
     """
 
-
+    workspace_id = resolve_workspace_id(workspace)
 
     columns = {
         "Mounted Data Factory Name": "str",
@@ -50,6 +49,7 @@ def list_mounted_data_factories(
         uses_pagination=True,
    )
 
+    dfs = []
     for r in responses:
         for v in r.get("value", []):
             new_data = {
@@ -58,13 +58,16 @@ def list_mounted_data_factories(
                 "Description": v.get("description"),
             }
 
-
+            dfs.append(pd.DataFrame(new_data, index=[0]))
 
-
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
 
 
+@log
 def get_mounted_data_factory_definition(
     mounted_data_factory: str | UUID, workspace: Optional[str | UUID] = None
 ) -> dict:
@@ -96,6 +99,7 @@ def get_mounted_data_factory_definition(
     )
 
 
+@log
 def delete_mounted_data_factory(
     mounted_data_factory: str | UUID, workspace: Optional[str | UUID]
 ):
```
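The dominant refactor in the list functions of this release (seen again in `_sql_endpoints.py`, `_sqldatabase.py`, `_warehouses.py`, and `_workspaces.py` below) is collecting one single-row frame per API record and concatenating once at the end, behind an `if dfs:` guard. A minimal self-contained sketch of the pattern; the `rows` payload is invented for illustration:

```python
import pandas as pd

# Invented stand-in for the paginated Fabric API records.
rows = [
    {"Mounted Data Factory Name": "mdf1", "Description": "demo"},
    {"Mounted Data Factory Name": "mdf2", "Description": None},
]

columns = {"Mounted Data Factory Name": "string", "Description": "string"}
df = pd.DataFrame(columns=list(columns))  # empty frame with the expected schema

dfs = [pd.DataFrame(r, index=[0]) for r in rows]
if dfs:
    # Single concat at the end instead of growing a DataFrame row by row.
    df = pd.concat(dfs, ignore_index=True)

print(df)
```

The guard matters because `pd.concat([])` raises `ValueError: No objects to concatenate`; with it, a workspace with no items returns the empty, correctly-typed frame instead of raising.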
sempy_labs/_notebooks.py
CHANGED
```diff
@@ -60,6 +60,7 @@ def _get_notebook_type(
     return file_extension[1:]
 
 
+@log
 def get_notebook_definition(
     notebook_name: str,
     workspace: Optional[str | UUID] = None,
@@ -172,6 +173,7 @@ def import_notebook_from_web(
     )
 
 
+@log
 def create_notebook(
     name: str,
     notebook_content: str,
@@ -227,6 +229,7 @@ def create_notebook(
     )
 
 
+@log
 def update_notebook_definition(
     name: str,
     notebook_content: str,
```
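Most of this release consists of decorating public functions with `@log` from `sempy._utils._log`, sempy's telemetry wrapper. The diff does not show its internals; below is a rough sketch of what a decorator of this shape does, where the logging behavior is an assumption for illustration, not sempy's actual implementation:

```python
import functools
import logging

logger = logging.getLogger("sempy_labs")


def log(func):
    """Wrap a function so that invocations and failures are recorded."""

    @functools.wraps(func)  # preserve __name__/__doc__ for the docs build
    def wrapper(*args, **kwargs):
        logger.info("invoking %s", func.__name__)
        try:
            return func(*args, **kwargs)
        except Exception:
            logger.exception("%s raised", func.__name__)
            raise

    return wrapper


@log
def get_notebook_definition(notebook_name, workspace=None):
    return {"name": notebook_name, "workspace": workspace}
```

Because the wrapper re-raises, decorated functions keep their original error behavior; only the instrumentation is added.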
sempy_labs/_semantic_models.py
CHANGED
```diff
@@ -13,8 +13,10 @@ from sempy_labs._helper_functions import (
 )
 import sempy_labs._icons as icons
 import re
+from sempy._utils._log import log
 
 
+@log
 def get_semantic_model_refresh_schedule(
     dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
@@ -72,6 +74,7 @@ def get_semantic_model_refresh_schedule(
     return df
 
 
+@log
 def enable_semantic_model_scheduled_refresh(
     dataset: str | UUID,
     workspace: Optional[str | UUID] = None,
@@ -121,6 +124,7 @@ def enable_semantic_model_scheduled_refresh(
     )
 
 
+@log
 def delete_semantic_model(dataset: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Deletes a semantic model.
@@ -140,6 +144,7 @@ def delete_semantic_model(dataset: str | UUID, workspace: Optional[str | UUID] =
     delete_item(item=dataset, type="SemanticModel", workspace=workspace)
 
 
+@log
 def update_semantic_model_refresh_schedule(
     dataset: str | UUID,
     days: Optional[str | List[str]] = None,
@@ -231,6 +236,7 @@ def update_semantic_model_refresh_schedule(
     )
 
 
+@log
 def list_semantic_model_datasources(
     dataset: str | UUID,
     workspace: Optional[str | UUID] = None,
```
sempy_labs/_spark.py
CHANGED
```diff
@@ -8,8 +8,10 @@ from sempy_labs._helper_functions import (
     _create_dataframe,
 )
 from uuid import UUID
+from sempy._utils._log import log
 
 
+@log
 def list_custom_pools(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Lists all `custom pools <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.
@@ -73,6 +75,7 @@ def list_custom_pools(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     return df
 
 
+@log
 def create_custom_pool(
     pool_name: str,
     node_size: str,
@@ -145,6 +148,7 @@ def create_custom_pool(
     )
 
 
+@log
 def update_custom_pool(
     pool_name: str,
     node_size: Optional[str] = None,
@@ -251,6 +255,7 @@ def update_custom_pool(
     )
 
 
+@log
 def delete_custom_pool(pool_name: str, workspace: Optional[str | UUID] = None):
     """
     Deletes a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.
@@ -286,6 +291,7 @@ def delete_custom_pool(pool_name: str, workspace: Optional[str | UUID] = None):
     )
 
 
+@log
 def get_spark_settings(
     workspace: Optional[str | UUID] = None, return_dataframe: bool = True
 ) -> pd.DataFrame | dict:
@@ -362,6 +368,7 @@ def get_spark_settings(
     return response.json()
 
 
+@log
 def update_spark_settings(
     automatic_log_enabled: Optional[bool] = None,
     high_concurrency_enabled: Optional[bool] = None,
```
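Assuming these helpers remain re-exported at the package top level as in prior releases, usage is unchanged by the decorator. A hedged usage sketch; "My Workspace" is a placeholder, and only parameters visible in the diff are passed:

```python
from sempy_labs import get_spark_settings, list_custom_pools, update_spark_settings

# Placeholder workspace; these functions accept a name or UUID.
pools = list_custom_pools(workspace="My Workspace")
print(pools)

# return_dataframe=False returns the raw settings payload instead of a DataFrame.
settings = get_spark_settings(workspace="My Workspace", return_dataframe=False)
print(settings)

update_spark_settings(automatic_log_enabled=True, high_concurrency_enabled=True)
```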
sempy_labs/_sql_endpoints.py
CHANGED
```diff
@@ -7,10 +7,13 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_item_name_and_id,
     _update_dataframe_datatypes,
+    resolve_workspace_id,
 )
 import sempy_labs._icons as icons
+from sempy._utils._log import log
 
 
+@log
 def list_sql_endpoints(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the SQL endpoints within a workspace.
@@ -35,12 +38,13 @@ def list_sql_endpoints(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     }
     df = _create_dataframe(columns=columns)
 
-
+    workspace_id = resolve_workspace_id(workspace)
 
     responses = _base_api(
         request=f"/v1/workspaces/{workspace_id}/sqlEndpoints", uses_pagination=True
     )
 
+    dfs = []
     for r in responses:
         for v in r.get("value", []):
 
@@ -49,11 +53,15 @@ def list_sql_endpoints(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
                 "SQL Endpoint Name": v.get("displayName"),
                 "Description": v.get("description"),
             }
-
+            dfs.append(pd.DataFrame(new_data, index=[0]))
+
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
 
     return df
 
 
+@log
 def refresh_sql_endpoint_metadata(
     item: str | UUID,
     type: Literal["Lakehouse", "MirroredDatabase"],
@@ -149,37 +157,52 @@ def refresh_sql_endpoint_metadata(
         "Error Message": "string",
     }
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    if result:
+        df = pd.json_normalize(result)
+
+        # Extract error code and message, set to None if no error
+        df["Error Code"] = df.get("error.errorCode", None)
+        df["Error Message"] = df.get("error.message", None)
+
+        # Friendly column renaming
+        df.rename(
+            columns={
+                "tableName": "Table Name",
+                "startDateTime": "Start Time",
+                "endDateTime": "End Time",
+                "status": "Status",
+                "lastSuccessfulSyncDateTime": "Last Successful Sync Time",
+            },
+            inplace=True,
+        )
 
-
-
-
-
-
-
+        # Drop the original 'error' column if present
+        df.drop(columns=[col for col in ["error"] if col in df.columns], inplace=True)
+
+        # Optional: Reorder columns
+        column_order = [
+            "Table Name",
+            "Status",
+            "Start Time",
+            "End Time",
+            "Last Successful Sync Time",
+            "Error Code",
+            "Error Message",
+        ]
+        df = df[column_order]
+
+        printout = f"{icons.green_dot} The metadata of the SQL endpoint for the '{item_name}' {type.lower()} within the '{workspace_name}' workspace has been refreshed"
+        if tables:
+            print(f"{printout} for the following tables: {tables}.")
+        else:
+            print(f"{printout} for all tables.")
+    else:
+        # If the target item has no tables to refresh the metadata for
+        df = pd.DataFrame(columns=columns.keys())
+        print(
+            f"{icons.yellow_dot} The SQL endpoint '{item_name}' {type.lower()} within the '{workspace_name}' workspace has no tables to refresh..."
+        )
 
     _update_dataframe_datatypes(df, columns)
 
-    printout = f"{icons.green_dot} The metadata of the SQL endpoint for the '{item_name}' {type.lower()} within the '{workspace_name}' workspace has been refreshed"
-    if tables:
-        print(f"{printout} for the following tables: {tables}.")
-    else:
-        print(f"{printout} for all tables.")
-
     return df
```
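The rewritten body of `refresh_sql_endpoint_metadata` relies on `pd.json_normalize` turning the nested `error` object into dotted column names, and on `DataFrame.get` returning a default when a column is absent. A self-contained sketch with an invented payload shaped like the sync-status records above:

```python
import pandas as pd

# Invented sample of the sync-status payload shape implied by the diff.
result = [
    {"tableName": "DimDate", "status": "Success",
     "startDateTime": "2024-01-01T00:00:00Z", "endDateTime": "2024-01-01T00:00:05Z",
     "lastSuccessfulSyncDateTime": "2024-01-01T00:00:05Z"},
    {"tableName": "FactSales", "status": "Failure",
     "startDateTime": "2024-01-01T00:00:00Z", "endDateTime": "2024-01-01T00:00:07Z",
     "lastSuccessfulSyncDateTime": None,
     "error": {"errorCode": "SyncError", "message": "Table is locked"}},
]

# The nested 'error' dict becomes 'error.errorCode' / 'error.message' columns.
df = pd.json_normalize(result)

# DataFrame.get returns the column if present, else the default, so a
# payload with no failing rows (no 'error.*' columns) still works.
df["Error Code"] = df.get("error.errorCode", None)
df["Error Message"] = df.get("error.message", None)
print(df[["tableName", "status", "Error Code", "Error Message"]])
```

Successful rows simply carry NaN in the error columns, which the later `_update_dataframe_datatypes` call can coerce per the declared column map.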
sempy_labs/_sqldatabase.py
CHANGED
```diff
@@ -1,5 +1,5 @@
 from sempy_labs._helper_functions import (
-
+    resolve_workspace_id,
     _base_api,
     _create_dataframe,
     _update_dataframe_datatypes,
@@ -9,8 +9,10 @@ from sempy_labs._helper_functions import (
 import pandas as pd
 from typing import Optional
 from uuid import UUID
+from sempy._utils._log import log
 
 
+@log
 def create_sql_database(
     name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
 ):
@@ -36,6 +38,7 @@ def create_sql_database(
     )
 
 
+@log
 def delete_sql_database(
     sql_database: str | UUID, workspace: Optional[str | UUID] = None
 ):
@@ -57,6 +60,7 @@ def delete_sql_database(
     delete_item(item=sql_database, type="SQLDatabase", workspace=workspace)
 
 
+@log
 def list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Lists all SQL databases in the Fabric workspace.
@@ -78,7 +82,7 @@ def list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         A pandas dataframe showing a list of SQL databases in the Fabric workspace.
     """
 
-
+    workspace_id = resolve_workspace_id(workspace)
 
     columns = {
         "SQL Database Name": "string",
@@ -96,6 +100,7 @@ def list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         client="fabric_sp",
     )
 
+    dfs = []
     for r in responses:
         for v in r.get("value", []):
             prop = v.get("properties", {})
@@ -108,13 +113,16 @@ def list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
                 "Server FQDN": prop.get("serverFqdn"),
             }
 
-
+            dfs.append(pd.DataFrame(new_data, index=[0]))
 
-
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
 
 
+@log
 def get_sql_database_tables(
     sql_database: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
@@ -150,6 +158,7 @@ def get_sql_database_tables(
     return df
 
 
+@log
 def get_sql_database_columns(
     sql_database: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
```
sempy_labs/_tags.py
CHANGED
```diff
@@ -10,8 +10,10 @@ import pandas as pd
 from typing import Optional, List
 from uuid import UUID
 import sempy_labs._icons as icons
+from sempy._utils._log import log
 
 
+@log
 def list_tags() -> pd.DataFrame:
     """
     Shows a list of all the tenant's tags.
@@ -39,7 +41,6 @@ def list_tags() -> pd.DataFrame:
     )
 
     dfs = []
-
     for r in responses:
         for v in r.get("value", []):
             new_data = {
@@ -55,6 +56,7 @@ def list_tags() -> pd.DataFrame:
     return df
 
 
+@log
 def resolve_tags(tags: str | List[str]) -> List[str]:
     """
     Resolves the tags to a list of strings.
@@ -92,6 +94,7 @@ def resolve_tags(tags: str | List[str]) -> List[str]:
     return tag_list
 
 
+@log
 def apply_tags(
     item: str | UUID,
     type: str,
@@ -143,6 +146,7 @@ def apply_tags(
     )
 
 
+@log
 def unapply_tags(
     item: str | UUID,
     type: str,
```
sempy_labs/_user_delegation_key.py
CHANGED
```diff
@@ -4,8 +4,10 @@ from sempy_labs._helper_functions import (
 )
 from datetime import datetime, timedelta, timezone
 import xml.etree.ElementTree as ET
+from sempy._utils._log import log
 
 
+@log
 def get_user_delegation_key():
     """
     Gets a key that can be used to sign a user delegation SAS (shared access signature). A user delegation SAS grants access to Azure Blob Storage resources by using Microsoft Entra credentials.
```
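For context on what such a key is for: outside of sempy-labs, the equivalent flow with the `azure-storage-blob` and `azure-identity` SDKs looks roughly like this (account, container, and blob names are placeholders; this is not sempy-labs code):

```python
from datetime import datetime, timedelta, timezone

from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobSasPermissions, BlobServiceClient, generate_blob_sas

# Placeholder account URL; authenticate with Entra credentials.
account_url = "https://<account>.blob.core.windows.net"
service = BlobServiceClient(account_url=account_url, credential=DefaultAzureCredential())

start = datetime.now(timezone.utc)
expiry = start + timedelta(hours=1)

# Obtain the user delegation key, then use it to sign a blob-level SAS.
key = service.get_user_delegation_key(key_start_time=start, key_expiry_time=expiry)
sas = generate_blob_sas(
    account_name="<account>",      # placeholder
    container_name="<container>",  # placeholder
    blob_name="<blob>",            # placeholder
    user_delegation_key=key,
    permission=BlobSasPermissions(read=True),
    expiry=expiry,
)
```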
sempy_labs/_variable_libraries.py
CHANGED
```diff
@@ -1,5 +1,4 @@
 from sempy_labs._helper_functions import (
-    resolve_workspace_name_and_id,
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
@@ -9,8 +8,10 @@ from sempy_labs._helper_functions import (
 import pandas as pd
 from typing import Optional
 from uuid import UUID
+from sempy._utils._log import log
 
 
+@log
 def list_variable_libraries(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the variable libraries within a workspace.
@@ -68,6 +69,7 @@ def list_variable_libraries(workspace: Optional[str | UUID] = None) -> pd.DataFr
     return df
 
 
+@log
 def delete_variable_library(
     variable_library: str | UUID, workspace: Optional[str | UUID] = None
 ):
```
sempy_labs/_warehouses.py
CHANGED
```diff
@@ -4,13 +4,16 @@ from sempy_labs._helper_functions import (
     _create_dataframe,
     _update_dataframe_datatypes,
     delete_item,
+    resolve_workspace_id,
 )
 import pandas as pd
 from typing import Optional
 import sempy_labs._icons as icons
 from uuid import UUID
+from sempy._utils._log import log
 
 
+@log
 def create_warehouse(
     warehouse: str,
     description: Optional[str] = None,
@@ -68,6 +71,7 @@ def create_warehouse(
     return result.get("id")
 
 
+@log
 def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the warehouses within a workspace.
@@ -99,7 +103,7 @@ def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     }
     df = _create_dataframe(columns=columns)
 
-
+    workspace_id = resolve_workspace_id(workspace)
 
     responses = _base_api(
         request=f"/v1/workspaces/{workspace_id}/warehouses",
@@ -107,6 +111,7 @@ def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         client="fabric_sp",
     )
 
+    dfs = []
     for r in responses:
         for v in r.get("value", []):
             prop = v.get("properties", {})
@@ -119,13 +124,16 @@ def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
                 "Created Date": prop.get("createdDate"),
                 "Last Updated Time": prop.get("lastUpdatedTime"),
             }
-
+            dfs.append(pd.DataFrame(new_data, index=[0]))
 
-
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
 
 
+@log
 def delete_warehouse(name: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Deletes a Fabric warehouse.
@@ -145,6 +153,7 @@ def delete_warehouse(name: str | UUID, workspace: Optional[str | UUID] = None):
     delete_item(item=name, type="Warehouse", workspace=workspace)
 
 
+@log
 def get_warehouse_tables(
     warehouse: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
@@ -180,6 +189,7 @@ def get_warehouse_tables(
     return df
 
 
+@log
 def get_warehouse_columns(
     warehouse: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
```
sempy_labs/_workloads.py
CHANGED
```diff
@@ -7,8 +7,10 @@ from sempy_labs._helper_functions import (
     _create_dataframe,
 )
 from uuid import UUID
+from sempy._utils._log import log
 
 
+@log
 def list_workloads(capacity: str | UUID, **kwargs) -> pd.DataFrame:
     """
     Returns the current state of the specified capacity workloads.
@@ -59,6 +61,7 @@ def list_workloads(capacity: str | UUID, **kwargs) -> pd.DataFrame:
     return df
 
 
+@log
 def patch_workload(
     capacity: str | UUID,
     workload_name: str,
```
sempy_labs/_workspace_identity.py
CHANGED
```diff
@@ -5,8 +5,10 @@ from sempy_labs._helper_functions import (
 from typing import Optional
 import sempy_labs._icons as icons
 from uuid import UUID
+from sempy._utils._log import log
 
 
+@log
 def provision_workspace_identity(workspace: Optional[str | UUID] = None):
     """
     Provisions a workspace identity for a workspace.
@@ -38,6 +40,7 @@ def provision_workspace_identity(workspace: Optional[str | UUID] = None):
     )
 
 
+@log
 def deprovision_workspace_identity(workspace: Optional[str | UUID] = None):
     """
     Deprovisions a workspace identity for a workspace.
```
sempy_labs/_workspaces.py
CHANGED
```diff
@@ -8,8 +8,10 @@ from sempy_labs._helper_functions import (
     _create_dataframe,
 )
 from uuid import UUID
+from sempy._utils._log import log
 
 
+@log
 def delete_user_from_workspace(
     email_address: str, workspace: Optional[str | UUID] = None
 ):
@@ -42,6 +44,7 @@ def delete_user_from_workspace(
     )
 
 
+@log
 def update_workspace_user(
     email_address: str,
     role_name: str,
@@ -102,6 +105,7 @@ def update_workspace_user(
     )
 
 
+@log
 def list_workspace_users(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     A list of all the users of a workspace and their roles.
@@ -140,6 +144,7 @@ def list_workspace_users(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         client="fabric_sp",
     )
 
+    dfs = []
     for r in responses:
         for v in r.get("value", []):
             p = v.get("principal", {})
@@ -150,11 +155,15 @@ def list_workspace_users(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
                 "Role": v.get("role"),
                 "Email Address": p.get("userDetails", {}).get("userPrincipalName"),
             }
-
+            dfs.append(pd.DataFrame(new_data, index=[0]))
+
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
 
     return df
 
 
+@log
 def add_user_to_workspace(
     email_address: str,
     role_name: str,
@@ -213,6 +222,7 @@ def add_user_to_workspace(
     )
 
 
+@log
 def assign_workspace_to_capacity(
     capacity: str | UUID,
     workspace: Optional[str | UUID] = None,
@@ -255,6 +265,7 @@ def assign_workspace_to_capacity(
     )
 
 
+@log
 def unassign_workspace_from_capacity(workspace: Optional[str | UUID] = None):
     """
     Unassigns a workspace from its assigned capacity.
@@ -284,6 +295,7 @@ def unassign_workspace_from_capacity(workspace: Optional[str | UUID] = None):
     )
 
 
+@log
 def list_workspace_role_assignments(
     workspace: Optional[str | UUID] = None,
 ) -> pd.DataFrame:
@@ -337,6 +349,7 @@ def list_workspace_role_assignments(
     return df
 
 
+@log
 def delete_workspace(workspace: Optional[str | UUID] = None):
     """
     Deletes a workspace.
```