semantic-link-labs 0.8.0__py3-none-any.whl → 0.8.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of semantic-link-labs might be problematic; see the registry's advisory page for more details.
- {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/METADATA +39 -7
- {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/RECORD +47 -37
- sempy_labs/__init__.py +70 -51
- sempy_labs/_ai.py +0 -2
- sempy_labs/_capacity_migration.py +1 -2
- sempy_labs/_data_pipelines.py +118 -0
- sempy_labs/_documentation.py +144 -0
- sempy_labs/_eventhouses.py +118 -0
- sempy_labs/_eventstreams.py +118 -0
- sempy_labs/_generate_semantic_model.py +3 -3
- sempy_labs/_git.py +3 -3
- sempy_labs/_helper_functions.py +117 -26
- sempy_labs/_icons.py +21 -0
- sempy_labs/_kql_databases.py +134 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_list_functions.py +12 -425
- sempy_labs/_mirrored_warehouses.py +50 -0
- sempy_labs/_ml_experiments.py +122 -0
- sempy_labs/_ml_models.py +120 -0
- sempy_labs/_model_auto_build.py +0 -4
- sempy_labs/_model_bpa.py +11 -11
- sempy_labs/_model_bpa_bulk.py +8 -7
- sempy_labs/_model_dependencies.py +26 -18
- sempy_labs/_notebooks.py +5 -16
- sempy_labs/_query_scale_out.py +2 -2
- sempy_labs/_refresh_semantic_model.py +7 -19
- sempy_labs/_spark.py +10 -10
- sempy_labs/_vertipaq.py +16 -18
- sempy_labs/_warehouses.py +132 -0
- sempy_labs/_workspaces.py +0 -3
- sempy_labs/admin/_basic_functions.py +92 -10
- sempy_labs/admin/_domains.py +1 -1
- sempy_labs/directlake/_directlake_schema_sync.py +1 -1
- sempy_labs/directlake/_dl_helper.py +32 -16
- sempy_labs/directlake/_guardrails.py +7 -7
- sempy_labs/directlake/_update_directlake_partition_entity.py +1 -1
- sempy_labs/directlake/_warm_cache.py +1 -1
- sempy_labs/lakehouse/_get_lakehouse_tables.py +3 -3
- sempy_labs/lakehouse/_lakehouse.py +3 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +5 -0
- sempy_labs/report/_generate_report.py +1 -1
- sempy_labs/report/_report_bpa.py +13 -3
- sempy_labs/report/_reportwrapper.py +14 -16
- sempy_labs/tom/_model.py +261 -24
- {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/top_level.txt +0 -0
sempy_labs/_icons.py
CHANGED
|
@@ -93,3 +93,24 @@ sku_mapping = {
|
|
|
93
93
|
"P4": "F512",
|
|
94
94
|
"P5": "F1024",
|
|
95
95
|
}
|
|
96
|
+
# Refresh types accepted by the semantic-model refresh APIs.
refreshTypes = [
    "full",
    "automatic",
    "dataOnly",
    "calculate",
    "clearValues",
    "defragment",
]

# Maps a Fabric item type name to the REST API URL segment for that item kind.
itemTypes = {
    "DataPipeline": "dataPipelines",
    "Eventstream": "eventstreams",
    "KQLDatabase": "kqlDatabases",
    "KQLQueryset": "kqlQuerysets",
    "Lakehouse": "lakehouses",
    "MLExperiment": "mlExperiments",
    "MLModel": "mlModels",
    "Notebook": "notebooks",
    "Warehouse": "warehouses",
}

# Default schema name used when none is specified.
default_schema = "dbo"
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
import sempy.fabric as fabric
|
|
2
|
+
import pandas as pd
|
|
3
|
+
import sempy_labs._icons as icons
|
|
4
|
+
from typing import Optional
|
|
5
|
+
from sempy_labs._helper_functions import (
|
|
6
|
+
resolve_workspace_name_and_id,
|
|
7
|
+
lro,
|
|
8
|
+
pagination,
|
|
9
|
+
)
|
|
10
|
+
from sempy.fabric.exceptions import FabricHTTPException
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def list_kql_databases(workspace: Optional[str] = None) -> pd.DataFrame:
    """
    Shows the KQL databases within a workspace.

    Parameters
    ----------
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the KQL databases within a workspace.
    """

    columns = [
        "KQL Database Name",
        "KQL Database Id",
        "Description",
        "Parent Eventhouse Item Id",
        "Query Service URI",
        "Ingestion Service URI",
        "Database Type",
    ]

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    client = fabric.FabricRestClient()
    response = client.get(f"/v1/workspaces/{workspace_id}/kqlDatabases")
    if response.status_code != 200:
        raise FabricHTTPException(response)

    responses = pagination(client, response)

    # Collect one dict per database and build the frame once at the end;
    # calling pd.concat inside the loop is quadratic in the number of rows.
    rows = []
    for r in responses:
        for v in r.get("value", []):
            prop = v.get("properties", {})
            rows.append(
                {
                    "KQL Database Name": v.get("displayName"),
                    "KQL Database Id": v.get("id"),
                    "Description": v.get("description"),
                    "Parent Eventhouse Item Id": prop.get("parentEventhouseItemId"),
                    "Query Service URI": prop.get("queryServiceUri"),
                    "Ingestion Service URI": prop.get("ingestionServiceUri"),
                    "Database Type": prop.get("databaseType"),
                }
            )

    return pd.DataFrame(rows, columns=columns)
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def create_kql_database(
    name: str, description: Optional[str] = None, workspace: Optional[str] = None
):
    """
    Creates a KQL database.

    Parameters
    ----------
    name: str
        Name of the KQL database.
    description : str, default=None
        A description of the KQL database.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Only send the optional description when one was provided.
    payload = {"displayName": name}
    if description:
        payload["description"] = description

    client = fabric.FabricRestClient()
    response = client.post(
        f"/v1/workspaces/{workspace_id}/kqlDatabases", json=payload
    )

    # Creation may complete synchronously (201) or run as a
    # long-running operation (202); lro handles both.
    lro(client, response, status_codes=[201, 202])

    print(
        f"{icons.green_dot} The '{name}' KQL database has been created within the '{workspace}' workspace."
    )
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
def delete_kql_database(name: str, workspace: Optional[str] = None):
    """
    Deletes a KQL database.

    Parameters
    ----------
    name: str
        Name of the KQL database.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Resolve the display name to the item's id before issuing the delete.
    item_id = fabric.resolve_item_id(
        item_name=name, type="KQLDatabase", workspace=workspace
    )

    client = fabric.FabricRestClient()
    response = client.delete(f"/v1/workspaces/{workspace_id}/kqlDatabases/{item_id}")

    if response.status_code != 200:
        raise FabricHTTPException(response)
    print(
        f"{icons.green_dot} The '{name}' KQL database within the '{workspace}' workspace has been deleted."
    )
|
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
import sempy.fabric as fabric
|
|
2
|
+
import pandas as pd
|
|
3
|
+
import sempy_labs._icons as icons
|
|
4
|
+
from typing import Optional
|
|
5
|
+
from sempy_labs._helper_functions import (
|
|
6
|
+
resolve_workspace_name_and_id,
|
|
7
|
+
lro,
|
|
8
|
+
pagination,
|
|
9
|
+
)
|
|
10
|
+
from sempy.fabric.exceptions import FabricHTTPException
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def list_kql_querysets(workspace: Optional[str] = None) -> pd.DataFrame:
    """
    Shows the KQL querysets within a workspace.

    Parameters
    ----------
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the KQL querysets within a workspace.
    """

    columns = [
        "KQL Queryset Name",
        "KQL Queryset Id",
        "Description",
    ]

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    client = fabric.FabricRestClient()
    response = client.get(f"/v1/workspaces/{workspace_id}/kqlQuerysets")
    if response.status_code != 200:
        raise FabricHTTPException(response)

    responses = pagination(client, response)

    # Collect one dict per queryset and build the frame once at the end;
    # calling pd.concat inside the loop is quadratic in the number of rows.
    rows = []
    for r in responses:
        for v in r.get("value", []):
            rows.append(
                {
                    "KQL Queryset Name": v.get("displayName"),
                    "KQL Queryset Id": v.get("id"),
                    "Description": v.get("description"),
                }
            )

    return pd.DataFrame(rows, columns=columns)
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def create_kql_queryset(
    name: str, description: Optional[str] = None, workspace: Optional[str] = None
):
    """
    Creates a KQL queryset.

    Parameters
    ----------
    name: str
        Name of the KQL queryset.
    description : str, default=None
        A description of the KQL queryset.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Only send the optional description when one was provided.
    payload = {"displayName": name}
    if description:
        payload["description"] = description

    client = fabric.FabricRestClient()
    response = client.post(
        f"/v1/workspaces/{workspace_id}/kqlQuerysets", json=payload
    )

    # Creation may complete synchronously (201) or run as a
    # long-running operation (202); lro handles both.
    lro(client, response, status_codes=[201, 202])

    print(
        f"{icons.green_dot} The '{name}' KQL queryset has been created within the '{workspace}' workspace."
    )
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def delete_kql_queryset(name: str, workspace: Optional[str] = None):
    """
    Deletes a KQL queryset.

    Parameters
    ----------
    name: str
        Name of the KQL queryset.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Resolve the display name to the item's id before issuing the delete.
    # (Renamed from the copy-pasted 'kql_database_id' — this is a queryset id.)
    kql_queryset_id = fabric.resolve_item_id(
        item_name=name, type="KQLQueryset", workspace=workspace
    )

    client = fabric.FabricRestClient()
    response = client.delete(
        f"/v1/workspaces/{workspace_id}/kqlQuerysets/{kql_queryset_id}"
    )

    if response.status_code != 200:
        raise FabricHTTPException(response)
    print(
        f"{icons.green_dot} The '{name}' KQL queryset within the '{workspace}' workspace has been deleted."
    )
|