semantic-link-labs 0.8.10-py3-none-any.whl → 0.8.11-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This release has been flagged as potentially problematic by the registry.
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +3 -2
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +73 -72
- sempy_labs/__init__.py +6 -2
- sempy_labs/_clear_cache.py +39 -37
- sempy_labs/_connections.py +13 -13
- sempy_labs/_data_pipelines.py +20 -20
- sempy_labs/_dataflows.py +27 -28
- sempy_labs/_dax.py +41 -47
- sempy_labs/_environments.py +26 -23
- sempy_labs/_eventhouses.py +16 -15
- sempy_labs/_eventstreams.py +16 -15
- sempy_labs/_external_data_shares.py +18 -20
- sempy_labs/_gateways.py +14 -14
- sempy_labs/_generate_semantic_model.py +99 -62
- sempy_labs/_git.py +105 -43
- sempy_labs/_helper_functions.py +148 -131
- sempy_labs/_job_scheduler.py +92 -0
- sempy_labs/_kql_databases.py +16 -15
- sempy_labs/_kql_querysets.py +16 -15
- sempy_labs/_list_functions.py +114 -99
- sempy_labs/_managed_private_endpoints.py +19 -17
- sempy_labs/_mirrored_databases.py +51 -48
- sempy_labs/_mirrored_warehouses.py +5 -4
- sempy_labs/_ml_experiments.py +16 -15
- sempy_labs/_ml_models.py +15 -14
- sempy_labs/_model_bpa.py +3 -3
- sempy_labs/_model_dependencies.py +55 -29
- sempy_labs/_notebooks.py +27 -25
- sempy_labs/_one_lake_integration.py +23 -26
- sempy_labs/_query_scale_out.py +67 -64
- sempy_labs/_refresh_semantic_model.py +25 -26
- sempy_labs/_spark.py +33 -32
- sempy_labs/_sql.py +12 -9
- sempy_labs/_translations.py +10 -7
- sempy_labs/_vertipaq.py +34 -31
- sempy_labs/_warehouses.py +22 -21
- sempy_labs/_workspace_identity.py +11 -10
- sempy_labs/_workspaces.py +40 -33
- sempy_labs/admin/_basic_functions.py +10 -12
- sempy_labs/admin/_external_data_share.py +3 -3
- sempy_labs/admin/_items.py +4 -4
- sempy_labs/admin/_scanner.py +3 -1
- sempy_labs/directlake/_directlake_schema_compare.py +18 -14
- sempy_labs/directlake/_directlake_schema_sync.py +18 -12
- sempy_labs/directlake/_dl_helper.py +25 -26
- sempy_labs/directlake/_generate_shared_expression.py +10 -9
- sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
- sempy_labs/directlake/_get_shared_expression.py +4 -3
- sempy_labs/directlake/_guardrails.py +12 -6
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
- sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
- sempy_labs/directlake/_warm_cache.py +87 -65
- sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
- sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
- sempy_labs/lakehouse/_lakehouse.py +17 -13
- sempy_labs/lakehouse/_shortcuts.py +42 -23
- sempy_labs/migration/_create_pqt_file.py +16 -11
- sempy_labs/migration/_refresh_calc_tables.py +16 -10
- sempy_labs/report/_download_report.py +9 -8
- sempy_labs/report/_generate_report.py +40 -44
- sempy_labs/report/_paginated.py +9 -9
- sempy_labs/report/_report_bpa.py +13 -9
- sempy_labs/report/_report_functions.py +80 -91
- sempy_labs/report/_report_helper.py +8 -4
- sempy_labs/report/_report_list_functions.py +24 -13
- sempy_labs/report/_report_rebind.py +17 -16
- sempy_labs/report/_reportwrapper.py +41 -33
- sempy_labs/tom/_model.py +43 -6
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
sempy_labs/_job_scheduler.py
ADDED

@@ -0,0 +1,92 @@
+import sempy.fabric as fabric
+import pandas as pd
+from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    resolve_item_name_and_id,
+    pagination,
+)
+from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
+
+
+def list_item_job_instances(
+    item: str | UUID, type: Optional[str] = None, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Returns a list of job instances for the specified item.
+
+    This is a wrapper function for the following API: `Job Scheduler - List Item Job Instances <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/list-item-job-instances>`_.
+
+    Parameters
+    ----------
+    item : str | uuid.UUID
+        The item name or ID
+    type : str, default=None
+        The item type. If specifying the item name as the item, the item type is required.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        Shows a list of job instances for the specified item.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (item_name, item_id) = resolve_item_name_and_id(
+        item=item, type=type, workspace=workspace
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.get(
+        f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances"
+    )
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    df = pd.DataFrame(
+        columns=[
+            "Job Instance Id",
+            "Item Name",
+            "Item Id",
+            "Item Type",
+            "Job Type",
+            "Invoke Type",
+            "Status",
+            "Root Activity Id" "Start Time UTC",
+            "End Time UTC",
+            "Failure Reason",
+        ]
+    )
+
+    responses = pagination(client, response)
+
+    if not responses[0].get("value"):
+        return df
+
+    dfs = []
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "Job Instance Id": v.get("id"),
+                "Item Name": item_name,
+                "Item Id": v.get("itemId"),
+                "Item Type": type,
+                "Job Type": v.get("jobType"),
+                "Invoke Type": v.get("invokeType"),
+                "Status": v.get("status"),
+                "Root Activity Id": v.get("rootActivityId"),
+                "Start Time UTC": v.get("startTimeUtc"),
+                "End Time UTC": v.get("endTimeUtc"),
+                "Failure Reason": v.get("failureReason"),
+            }
+            dfs.append(pd.DataFrame(new_data, index=[0]))
+
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
+
+    return df
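The new list_item_job_instances function wraps the Fabric Job Scheduler REST API and returns one row per job instance for the given item. A minimal usage sketch, assuming a Fabric notebook with this release installed and that the function is exported at the package root (the __init__.py change listed above suggests it, but this is an assumption); the item and workspace names are hypothetical placeholders:

import sempy_labs as labs

# When the item is passed by name rather than by UUID, the item type
# must also be supplied so the name can be resolved to an ID.
df = labs.list_item_job_instances(
    item="Daily Load",   # hypothetical notebook name
    type="Notebook",
    workspace="Sales",   # hypothetical workspace name; a UUID also works
)

# Each row is one job instance; filter to failed runs, for example.
failed = df[df["Status"] == "Failed"]
print(failed[["Job Instance Id", "Job Type", "Failure Reason"]])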
sempy_labs/_kql_databases.py
CHANGED
@@ -8,9 +8,10 @@ from sempy_labs._helper_functions import (
     pagination,
 )
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
-def list_kql_databases(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_kql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the KQL databases within a workspace.
 
@@ -18,8 +19,8 @@ def list_kql_databases(workspace: Optional[str] = None) -> pd.DataFrame:
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -41,7 +42,7 @@ def list_kql_databases(workspace: Optional[str] = None) -> pd.DataFrame:
         ]
     )
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/kqlDatabases")
@@ -69,7 +70,7 @@ def list_kql_databases(workspace: Optional[str] = None) -> pd.DataFrame:
 
 
 def create_kql_database(
-    name: str, description: Optional[str] = None, workspace: Optional[str] = None
+    name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
 ):
     """
     Creates a KQL database.
@@ -82,13 +83,13 @@ def create_kql_database(
         Name of the KQL database.
     description : str, default=None
         A description of the environment.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     request_body = {"displayName": name}
 
@@ -103,11 +104,11 @@ def create_kql_database(
     lro(client, response, status_codes=[201, 202])
 
     print(
-        f"{icons.green_dot} The '{name}' KQL database has been created within the '{
+        f"{icons.green_dot} The '{name}' KQL database has been created within the '{workspace_name}' workspace."
     )
 
 
-def delete_kql_database(name: str, workspace: Optional[str] = None):
+def delete_kql_database(name: str, workspace: Optional[str | UUID] = None):
     """
     Deletes a KQL database.
 
@@ -117,15 +118,15 @@ def delete_kql_database(name: str, workspace: Optional[str] = None):
     ----------
     name: str
         Name of the KQL database.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     kql_database_id = fabric.resolve_item_id(
-        item_name=name, type="KQLDatabase", workspace=
+        item_name=name, type="KQLDatabase", workspace=workspace_id
     )
 
     client = fabric.FabricRestClient()
@@ -136,5 +137,5 @@ def delete_kql_database(name: str, workspace: Optional[str] = None):
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{name}' KQL database within the '{
+        f"{icons.green_dot} The '{name}' KQL database within the '{workspace_name}' workspace has been deleted."
     )
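The net effect of these changes is that every workspace parameter in this module now accepts a workspace UUID as well as a workspace name. A minimal sketch of the widened signature, assuming a Fabric notebook session; the UUID is a made-up placeholder:

from uuid import UUID
import sempy_labs as labs

# workspace may now be a name or a UUID; 0.8.10 accepted only a name (str).
# This UUID is a hypothetical placeholder, not a real workspace.
ws = UUID("12345678-1234-1234-1234-123456789abc")

df = labs.list_kql_databases(workspace=ws)
print(df.head())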
sempy_labs/_kql_querysets.py
CHANGED
@@ -8,9 +8,10 @@ from sempy_labs._helper_functions import (
     pagination,
 )
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
-def list_kql_querysets(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_kql_querysets(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the KQL querysets within a workspace.
 
@@ -18,8 +19,8 @@ def list_kql_querysets(workspace: Optional[str] = None) -> pd.DataFrame:
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -37,7 +38,7 @@ def list_kql_querysets(workspace: Optional[str] = None) -> pd.DataFrame:
         ]
     )
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/kqlQuerysets")
@@ -59,7 +60,7 @@ def list_kql_querysets(workspace: Optional[str] = None) -> pd.DataFrame:
 
 
 def create_kql_queryset(
-    name: str, description: Optional[str] = None, workspace: Optional[str] = None
+    name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
 ):
     """
     Creates a KQL queryset.
@@ -72,13 +73,13 @@ def create_kql_queryset(
         Name of the KQL queryset.
     description : str, default=None
         A description of the environment.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     request_body = {"displayName": name}
 
@@ -93,11 +94,11 @@ def create_kql_queryset(
     lro(client, response, status_codes=[201, 202])
 
     print(
-        f"{icons.green_dot} The '{name}' KQL queryset has been created within the '{
+        f"{icons.green_dot} The '{name}' KQL queryset has been created within the '{workspace_name}' workspace."
     )
 
 
-def delete_kql_queryset(name: str, workspace: Optional[str] = None):
+def delete_kql_queryset(name: str, workspace: Optional[str | UUID] = None):
     """
     Deletes a KQL queryset.
 
@@ -107,15 +108,15 @@ def delete_kql_queryset(name: str, workspace: Optional[str] = None):
     ----------
     name: str
         Name of the KQL queryset.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     kql_database_id = fabric.resolve_item_id(
-        item_name=name, type="KQLQueryset", workspace=
+        item_name=name, type="KQLQueryset", workspace=workspace_id
     )
 
     client = fabric.FabricRestClient()
@@ -126,5 +127,5 @@ def delete_kql_queryset(name: str, workspace: Optional[str] = None):
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{name}' KQL queryset within the '{
+        f"{icons.green_dot} The '{name}' KQL queryset within the '{workspace_name}' workspace has been deleted."
     )
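The queryset helpers follow the same pattern as the database helpers. A short create-and-delete round trip, again a hedged sketch assuming a notebook session and top-level exports; the queryset name is a hypothetical placeholder:

import sempy_labs as labs

# Create a KQL queryset in the default workspace (workspace=None resolves
# to the attached lakehouse's workspace), then remove it again.
labs.create_kql_queryset(name="Ad Hoc Queries")   # hypothetical name
labs.delete_kql_queryset(name="Ad Hoc Queries")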