semantic-link-labs 0.12.4__py3-none-any.whl → 0.12.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of semantic-link-labs might be problematic.
- {semantic_link_labs-0.12.4.dist-info → semantic_link_labs-0.12.5.dist-info}/METADATA +4 -3
- {semantic_link_labs-0.12.4.dist-info → semantic_link_labs-0.12.5.dist-info}/RECORD +23 -21
- sempy_labs/__init__.py +10 -8
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_dataflows.py +98 -10
- sempy_labs/_helper_functions.py +5 -1
- sempy_labs/_managed_private_endpoints.py +62 -0
- sempy_labs/_model_bpa.py +6 -0
- sempy_labs/_onelake.py +131 -0
- sempy_labs/admin/_basic_functions.py +17 -13
- sempy_labs/admin/_workspaces.py +2 -2
- sempy_labs/deployment_pipeline/__init__.py +21 -0
- sempy_labs/deployment_pipeline/_items.py +486 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +73 -41
- sempy_labs/graph/__init__.py +2 -0
- sempy_labs/graph/_groups.py +63 -0
- sempy_labs/graph/_users.py +61 -17
- sempy_labs/report/_export_report.py +0 -22
- sempy_labs/report/_report_rebind.py +29 -43
- sempy_labs/report/_reportwrapper.py +65 -30
- sempy_labs/_deployment_pipelines.py +0 -209
- {semantic_link_labs-0.12.4.dist-info → semantic_link_labs-0.12.5.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.12.4.dist-info → semantic_link_labs-0.12.5.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.12.4.dist-info → semantic_link_labs-0.12.5.dist-info}/top_level.txt +0 -0
sempy_labs/_onelake.py
ADDED
@@ -0,0 +1,131 @@
+from sempy_labs._helper_functions import (
+    _base_api,
+    resolve_workspace_id,
+    resolve_lakehouse_name_and_id,
+    resolve_workspace_name_and_id,
+)
+from sempy._utils._log import log
+from uuid import UUID
+from typing import Optional
+import pandas as pd
+import sempy_labs._icons as icons
+
+
+@log
+def get_onelake_settings(workspace: Optional[str | UUID] = None):
+    """
+    Obtains the workspace OneLake settings.
+
+    This is a wrapper function for the following API: `OneLake Settings - Get Settings <https://learn.microsoft.com/rest/api/fabric/core/onelake-settings/get-settings>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        The workspace OneLake settings.
+    """
+
+    workspace_id = resolve_workspace_id(workspace)
+    result = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/onelake/settings", client="fabric_sp"
+    ).json()
+
+    d = result.get("diagnostics", {})
+    enabled = True if d.get("status", {}) == "Enabled" else False
+    rows = []
+    rows.append(
+        {
+            "Enabled": enabled,
+            "Destination Type": (
+                d.get("destination", {}).get("type", {}) if enabled else None
+            ),
+            "Destination Id": (
+                d.get("destination", {}).get("lakehouse", {}).get("itemId", {})
+                if enabled
+                else None
+            ),
+            "Destination Workspace Id": (
+                d.get("destination", {}).get("lakehouse", {}).get("workspaceId", {})
+                if enabled
+                else None
+            ),
+        }
+    )
+
+    return pd.DataFrame(rows)
+
+
+def modify_onelake_diagnostics(
+    workspace: Optional[str | UUID] = None,
+    enabled: bool = True,
+    destination_lakehouse: Optional[str | UUID] = None,
+    destination_workspace: Optional[str | UUID] = None,
+):
+    """
+    Obtains the workspace OneLake settings.
+
+    This is a wrapper function for the following API: `OneLake Settings - Modify Diagnostics <https://learn.microsoft.com/rest/api/fabric/core/onelake-settings/modify-diagnostics>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    enabled : bool, default=True
+        Whether to enable or disable OneLake diagnostics.
+    destination_lakehouse : str | uuid.UUID, default=None
+        The name or ID of the destination lakehouse.
+        Defaults to None which resolves to the lakehouse of the attached lakehouse
+        or if no lakehouse attached, resolves to the lakehouse of the notebook.
+    destination_workspace : str | uuid.UUID, default=None
+        The name or ID of the destination workspace.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    workspace_id = resolve_workspace_id(workspace)
+    (destination_workspace_name, destination_workspace_id) = (
+        resolve_workspace_name_and_id(destination_workspace)
+    )
+    (destination_lakehouse_name, destination_lakehouse_id) = (
+        resolve_lakehouse_name_and_id(destination_lakehouse, destination_workspace_id)
+    )
+
+    if enabled:
+        payload = {
+            "status": "Enabled",
+            "destination": {
+                "type": "Lakehouse",
+                "lakehouse": {
+                    "referenceType": "ById",
+                    "itemId": destination_lakehouse_id,
+                    "workspaceId": destination_workspace_id,
+                },
+            },
+        }
+    else:
+        payload = {"status": "Disabled"}
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/onelake/settings/modifyDiagnostics",
+        client="fabric_sp",
+        method="post",
+        payload=payload,
+    )
+
+    if enabled:
+        print(
+            f"{icons.green_dot} OneLake diagnostics have been enabled and updated to use the '{destination_lakehouse_name}' lakehouse in the '{destination_workspace_name}' workspace as the destination."
+        )
+    else:
+        print(f"{icons.green_dot} OneLake diagnostics have been disabled.")
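The new module adds two public functions for reading and changing a workspace's OneLake diagnostics settings. Below is a minimal usage sketch based on the signatures shown above; the workspace and lakehouse names are placeholders, and the calls assume they run in a Fabric notebook or with Service Principal authentication configured.

from sempy_labs._onelake import get_onelake_settings, modify_onelake_diagnostics

# Route OneLake diagnostic events to a lakehouse in another workspace.
modify_onelake_diagnostics(
    workspace="Sales Workspace",              # placeholder workspace name or UUID
    enabled=True,
    destination_lakehouse="DiagnosticsLake",  # placeholder lakehouse name or UUID
    destination_workspace="Monitoring",       # placeholder workspace name or UUID
)

# Returns a one-row pandas DataFrame with 'Enabled', 'Destination Type',
# 'Destination Id' and 'Destination Workspace Id' columns.
df = get_onelake_settings(workspace="Sales Workspace")
print(df)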
sempy_labs/admin/_basic_functions.py
CHANGED
@@ -1,10 +1,9 @@
 from typing import Optional, List, Union, Tuple
 from uuid import UUID
 import sempy_labs._icons as icons
-from .._helper_functions import (
+from sempy_labs._helper_functions import (
     _is_valid_uuid,
     _build_url,
-    _update_dataframe_datatypes,
     _base_api,
     _create_dataframe,
 )
@@ -337,16 +336,23 @@ def list_workspace_access_details(
         request=f"/v1/admin/workspaces/{workspace_id}/users", client="fabric_sp"
     )

+    rows = []
     for v in response.json().get("accessDetails", []):
        [9 removed lines are not rendered in this extract]
+        rows.append(
+            {
+                "User Id": v.get("principal", {}).get("id"),
+                "User Name": v.get("principal", {}).get("displayName"),
+                "User Type": v.get("principal", {}).get("type"),
+                "Workspace Name": workspace_name,
+                "Workspace Id": workspace_id,
+                "Workspace Role": v.get("workspaceAccessDetails", {}).get(
+                    "workspaceRole"
+                ),
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
@@ -454,6 +460,4 @@ def list_workspace_users(workspace: Optional[str | UUID] = None) -> pd.DataFrame
     if rows:
         df = pd.DataFrame(rows, columns=list(columns.keys()))

-        _update_dataframe_datatypes(dataframe=df, column_map=columns)
-
     return df
sempy_labs/admin/_workspaces.py
CHANGED
@@ -1,4 +1,4 @@
-from .._helper_functions import (
+from sempy_labs._helper_functions import (
     _base_api,
     _build_url,
     _encode_user,
@@ -8,7 +8,7 @@ from .._helper_functions import (

 from uuid import UUID
 from typing import Optional
-from ._basic_functions import (
+from sempy_labs.admin._basic_functions import (
     _resolve_workspace_name_and_id,
 )
 import sempy_labs._icons as icons
sempy_labs/deployment_pipeline/__init__.py
ADDED
@@ -0,0 +1,21 @@
+from ._items import (
+    list_deployment_pipeline_operations,
+    list_deployment_pipeline_role_assignments,
+    list_deployment_pipeline_stage_items,
+    list_deployment_pipeline_stages,
+    list_deployment_pipelines,
+    unassign_workspace_from_stage,
+    assign_workspace_to_stage,
+    delete_deployment_pipeline,
+)
+
+__all__ = [
+    "list_deployment_pipeline_operations",
+    "list_deployment_pipeline_role_assignments",
+    "list_deployment_pipeline_stage_items",
+    "list_deployment_pipeline_stages",
+    "list_deployment_pipelines",
+    "unassign_workspace_from_stage",
+    "assign_workspace_to_stage",
+    "delete_deployment_pipeline",
+]
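The new sempy_labs.deployment_pipeline package re-exports the deployment pipeline functions that previously lived in the removed sempy_labs/_deployment_pipelines.py (see the file summary at the top). The implementations sit in the new _items.py, which is not shown in this extract, so the sketch below only demonstrates the new import path and assumes list_deployment_pipelines keeps its earlier behavior of taking no required arguments and returning a pandas DataFrame.

# Hypothetical usage; signatures are assumed, not taken from this diff.
from sempy_labs.deployment_pipeline import list_deployment_pipelines

df = list_deployment_pipelines()
print(df)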