semantic-link-labs 0.9.2-py3-none-any.whl → 0.9.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/METADATA +10 -6
- {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/RECORD +54 -44
- {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +27 -1
- sempy_labs/_ai.py +8 -5
- sempy_labs/_capacity_migration.py +3 -2
- sempy_labs/_connections.py +45 -9
- sempy_labs/_dax.py +17 -3
- sempy_labs/_delta_analyzer.py +308 -138
- sempy_labs/_eventhouses.py +70 -1
- sempy_labs/_gateways.py +56 -8
- sempy_labs/_generate_semantic_model.py +30 -9
- sempy_labs/_helper_functions.py +84 -9
- sempy_labs/_job_scheduler.py +226 -2
- sempy_labs/_list_functions.py +42 -19
- sempy_labs/_ml_experiments.py +1 -1
- sempy_labs/_model_bpa.py +17 -2
- sempy_labs/_model_bpa_rules.py +20 -8
- sempy_labs/_semantic_models.py +117 -0
- sempy_labs/_sql.py +73 -6
- sempy_labs/_sqldatabase.py +227 -0
- sempy_labs/_translations.py +2 -2
- sempy_labs/_vertipaq.py +3 -3
- sempy_labs/_warehouses.py +1 -1
- sempy_labs/admin/__init__.py +49 -8
- sempy_labs/admin/_activities.py +166 -0
- sempy_labs/admin/_apps.py +143 -0
- sempy_labs/admin/_basic_functions.py +32 -652
- sempy_labs/admin/_capacities.py +250 -0
- sempy_labs/admin/_datasets.py +184 -0
- sempy_labs/admin/_domains.py +1 -3
- sempy_labs/admin/_items.py +3 -1
- sempy_labs/admin/_reports.py +165 -0
- sempy_labs/admin/_scanner.py +53 -49
- sempy_labs/admin/_shared.py +74 -0
- sempy_labs/admin/_tenant.py +489 -0
- sempy_labs/directlake/_dl_helper.py +0 -1
- sempy_labs/directlake/_update_directlake_partition_entity.py +6 -0
- sempy_labs/graph/_teams.py +1 -1
- sempy_labs/graph/_users.py +9 -1
- sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
- sempy_labs/lakehouse/_get_lakehouse_tables.py +2 -2
- sempy_labs/lakehouse/_lakehouse.py +3 -3
- sempy_labs/lakehouse/_shortcuts.py +29 -16
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +2 -2
- sempy_labs/migration/_refresh_calc_tables.py +2 -2
- sempy_labs/report/__init__.py +3 -1
- sempy_labs/report/_download_report.py +4 -1
- sempy_labs/report/_export_report.py +272 -0
- sempy_labs/report/_report_functions.py +11 -263
- sempy_labs/report/_report_rebind.py +1 -1
- sempy_labs/tom/_model.py +281 -29
- {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/top_level.txt +0 -0
sempy_labs/admin/_scanner.py
CHANGED
@@ -2,12 +2,14 @@ import sempy.fabric as fabric
 from typing import Optional, List
 from uuid import UUID
 from sempy.fabric.exceptions import FabricHTTPException
-import numpy as np
 import time
+import sempy_labs._icons as icons
 from sempy_labs.admin._basic_functions import list_workspaces
 from sempy._utils._log import log
 from sempy_labs._helper_functions import (
     _base_api,
+    _is_valid_uuid,
+    _build_url,
 )
 
 
@@ -39,22 +41,17 @@ def scan_workspaces(
     dataset_expressions : bool, default=False
         Whether to return data source details.
     lineage : bool, default=False
-        Whether to return lineage info (upstream dataflows, tiles, data source IDs)
+        Whether to return lineage info (upstream dataflows, tiles, data source IDs)
     artifact_users : bool, default=False
         Whether to return user details for a Power BI item (such as a report or a dashboard).
-    workspace : str | List[str]
-        The required workspace name(s) or id(s) to be scanned
+    workspace : str | List[str] | UUID | List[UUID], default=None
+        The required workspace name(s) or id(s) to be scanned. It supports a limit of 100 workspaces and only IDs in GUID format.
 
     Returns
     -------
     dict
         A json object with the scan result.
     """
-    scan_result = {
-        "workspaces": [],
-        "datasourceInstances": [],
-        "misconfiguredDatasourceInstances": [],
-    }
 
     if workspace is None:
         workspace = fabric.resolve_workspace_name()
@@ -62,55 +59,62 @@ def scan_workspaces(
     if isinstance(workspace, str):
         workspace = [workspace]
 
+    if len(workspace) > 100:
+        print(
+            f"{icons.yellow_dot} More than 100 workspaces where provided. Truncating to the fist 100."
+        )
+        workspace = workspace[:100]
+
     workspace_list = []
 
+    for w in workspace:
+        if _is_valid_uuid(w):
+            workspace_list.append(w)
+        else:
+            dfW = list_workspaces(workspace=w)
+            workspace_list = (
+                workspace_list + dfW[dfW["Name"].isin(workspace)]["Id"].tolist()
+            )
 
+    url = "/v1.0/myorg/admin/workspaces/getInfo"
+    params = {}
+    params["lineage"] = lineage
+    params["datasourceDetails"] = data_source_details
+    params["datasetSchema"] = dataset_schema
+    params["datasetExpressions"] = dataset_expressions
+    params["getArtifactUsers"] = artifact_users
 
-    response = _base_api(
-        request=url,
-        method="post",
-        payload=payload,
-        status_codes=202,
-        client="fabric_sp",
-    )
+    url = _build_url(url, params)
 
+    payload = {"workspaces": workspace_list}
+
+    response = _base_api(
+        request=url,
+        method="post",
+        payload=payload,
+        status_codes=202,
+        client="fabric_sp",
+    )
+
+    scan_id = response.json()["id"]
+    scan_status = response.json().get("status")
+
+    while scan_status not in ["Succeeded", "Failed"]:
+        time.sleep(1)
         response = _base_api(
-            request=f"/v1.0/myorg/admin/workspaces/
+            request=f"/v1.0/myorg/admin/workspaces/scanStatus/{scan_id}",
             client="fabric_sp",
         )
+        scan_status = response.json().get("status")
 
+    if scan_status == "Failed":
+        raise FabricHTTPException(response)
 
+    response = _base_api(
+        request=f"/v1.0/myorg/admin/workspaces/scanResult/{scan_id}",
+        client="fabric_sp",
+    )
 
-    scan_result["misconfiguredDatasourceInstances"].extend(
-        responseJson["misconfiguredDatasourceInstances"]
-    )
+    print(f"{icons.green_dot} Status: {scan_status}")
 
-    return
+    return response.json()
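For orientation, the rewritten scan_workspaces now submits a getInfo scan, polls scanStatus until the scan succeeds or fails, and returns the scanResult JSON. Below is a minimal, hypothetical usage sketch based only on the parameter names visible in the diff above; the exact signature, defaults, and returned fields in the released wheel may differ.

```python
# Hypothetical usage sketch for the updated scan_workspaces.
# Parameter names are taken from the diff above; the workspace name is a placeholder.
from sempy_labs.admin import scan_workspaces

scan = scan_workspaces(
    workspace=["Sales Analytics"],  # up to 100 names or GUIDs; extras are truncated
    data_source_details=True,
    dataset_schema=False,
    dataset_expressions=False,
    lineage=True,
    artifact_users=False,
)

# The function now returns the raw scan-result JSON (a dict) instead of None.
for ws in scan.get("workspaces", []):
    print(ws.get("id"), ws.get("name"))
```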
sempy_labs/admin/_shared.py
ADDED
@@ -0,0 +1,74 @@
+import pandas as pd
+from sempy_labs._helper_functions import (
+    _base_api,
+    _create_dataframe,
+)
+
+
+def list_widely_shared_artifacts(
+    api_name: str = "LinksSharedToWholeOrganization",
+) -> pd.DataFrame:
+    """
+    Returns a list of Power BI reports that are shared with the whole organization through links or a list of Power BI items (such as reports or dashboards) that are published to the web.
+
+    This is a wrapper function for the following APIs:
+    `Admin - WidelySharedArtifacts LinksSharedToWholeOrganization <https://learn.microsoft.com/rest/api/power-bi/admin/widely-shared-artifacts-links-shared-to-whole-organization>`_.
+    `Admin - WidelySharedArtifacts PublishedToWeb <https://learn.microsoft.com/rest/api/power-bi/admin/widely-shared-artifacts-published-to-web>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    api_name : str, default = "LinksSharedToWholeOrganization"
+        The name of the API to call. Either "LinksSharedToWholeOrganization" or "PublishedToWeb".
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of Power BI reports that are shared with the whole organization through links or a list of Power BI items (such as reports or dashboards) that are published to the web.
+    """
+
+    columns = {
+        "Artifact Id": "string",
+        "Artifact Name": "string",
+        "Artifact Type": "string",
+        "Access Right": "string",
+        "Share Type": "string",
+        "Sharer Name": "string",
+        "Sharer Email Address": "string",
+        "Sharer Identifier": "string",
+        "Sharer Graph Id": "string",
+        "Sharer Principal Type": "string",
+    }
+
+    df = _create_dataframe(columns=columns)
+
+    api = (
+        "linksSharedToWholeOrganization"
+        if api_name == "LinksSharedToWholeOrganization"
+        else "publishedToWeb"
+    )
+
+    responses = _base_api(
+        request=f"/v1.0/myorg/admin/widelySharedArtifacts/{api}", client="fabric_sp"
+    )
+
+    for r in responses:
+        for v in r.get("value", []):
+            sharer = v.get("sharer", {})
+            new_data = {
+                "Artifact Id": v.get("artifactId"),
+                "Artifact Name": v.get("displayName"),
+                "Artifact Type": v.get("artifactType"),
+                "Access Right": v.get("accessRight"),
+                "Share Type": v.get("shareType"),
+                "Sharer Name": sharer.get("displayName"),
+                "Sharer Email Address": sharer.get("emailAddress"),
+                "Sharer Identifier": sharer.get("identifier"),
+                "Sharer Graph Id": sharer.get("graphId"),
+                "Sharer Principal Type": sharer.get("principalType"),
+            }
+
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
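A short, hedged usage sketch for the new helper follows. It assumes list_widely_shared_artifacts is re-exported from sempy_labs.admin, which the expanded admin/__init__.py in the summary above suggests but this diff does not show; the column names used for display are taken from the added file.

```python
# Hypothetical usage sketch for list_widely_shared_artifacts; the import path
# is an assumption based on the admin/__init__.py changes listed above.
from sempy_labs.admin import list_widely_shared_artifacts

# Reports shared with the whole organization via links (the default API).
df_links = list_widely_shared_artifacts()

# Items (such as reports or dashboards) published to the web.
df_web = list_widely_shared_artifacts(api_name="PublishedToWeb")

print(df_links[["Artifact Name", "Artifact Type", "Sharer Email Address"]].head())
```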