semantic-link-labs 0.7.4__py3-none-any.whl → 0.8.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.0.dist-info}/METADATA +7 -3
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.0.dist-info}/RECORD +32 -23
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.0.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +57 -18
- sempy_labs/_capacities.py +39 -3
- sempy_labs/_capacity_migration.py +624 -0
- sempy_labs/_clear_cache.py +8 -8
- sempy_labs/_connections.py +15 -13
- sempy_labs/_git.py +20 -21
- sempy_labs/_helper_functions.py +33 -30
- sempy_labs/_icons.py +19 -0
- sempy_labs/_list_functions.py +210 -0
- sempy_labs/_model_bpa.py +1 -1
- sempy_labs/_query_scale_out.py +4 -3
- sempy_labs/_spark.py +31 -36
- sempy_labs/_sql.py +60 -15
- sempy_labs/_vertipaq.py +9 -7
- sempy_labs/admin/__init__.py +53 -0
- sempy_labs/admin/_basic_functions.py +806 -0
- sempy_labs/admin/_domains.py +411 -0
- sempy_labs/directlake/_generate_shared_expression.py +11 -14
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +14 -24
- sempy_labs/report/__init__.py +9 -6
- sempy_labs/report/_report_bpa.py +359 -0
- sempy_labs/report/_report_bpa_rules.py +113 -0
- sempy_labs/report/_report_helper.py +254 -0
- sempy_labs/report/_report_list_functions.py +95 -0
- sempy_labs/report/_report_rebind.py +0 -4
- sempy_labs/report/_reportwrapper.py +2039 -0
- sempy_labs/tom/_model.py +78 -4
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.0.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.0.dist-info}/top_level.txt +0 -0
sempy_labs/_sql.py
CHANGED
@@ -6,7 +6,7 @@ import struct
 import uuid
 from itertools import chain, repeat
 from sempy.fabric.exceptions import FabricHTTPException
-from sempy_labs._helper_functions import resolve_warehouse_id
+from sempy_labs._helper_functions import resolve_warehouse_id, resolve_lakehouse_id
 
 
 def bytes2mswin_bstr(value: bytes) -> bytes:
@@ -28,30 +28,48 @@ def bytes2mswin_bstr(value: bytes) -> bytes:
     return struct.pack("<i", len(encoded_bytes)) + encoded_bytes
 
 
-class ConnectWarehouse:
+class ConnectBase:
     def __init__(
         self,
-        warehouse: str,
+        name: str,
         workspace: Optional[Union[str, uuid.UUID]] = None,
         timeout: Optional[int] = None,
+        endpoint_type: str = "warehouse",
     ):
         from sempy.fabric._token_provider import SynapseTokenProvider
         import pyodbc
 
         workspace = fabric.resolve_workspace_name(workspace)
         workspace_id = fabric.resolve_workspace_id(workspace)
-        warehouse_id = resolve_warehouse_id(warehouse=warehouse, workspace=workspace)
 
-        #
+        # Resolve the appropriate ID (warehouse or lakehouse)
+        if endpoint_type == "warehouse":
+            resource_id = resolve_warehouse_id(warehouse=name, workspace=workspace)
+        else:
+            resource_id = resolve_lakehouse_id(lakehouse=name, workspace=workspace)
+
+        # Get the TDS endpoint
         client = fabric.FabricRestClient()
-        response = client.get(
+        response = client.get(
+            f"v1/workspaces/{workspace_id}/{endpoint_type}s/{resource_id}"
+        )
         if response.status_code != 200:
             raise FabricHTTPException(response)
-        tds_endpoint = response.json().get("properties", {}).get("connectionString")
 
+        if endpoint_type == "warehouse":
+            tds_endpoint = response.json().get("properties", {}).get("connectionString")
+        else:
+            tds_endpoint = (
+                response.json()
+                .get("properties", {})
+                .get("sqlEndpointProperties", {})
+                .get("connectionString")
+            )
+
+        # Set up the connection string
         access_token = SynapseTokenProvider()()
         tokenstruct = bytes2mswin_bstr(access_token.encode())
-        conn_str = f"DRIVER={{ODBC Driver 18 for SQL Server}};SERVER={tds_endpoint};DATABASE={
+        conn_str = f"DRIVER={{ODBC Driver 18 for SQL Server}};SERVER={tds_endpoint};DATABASE={name};Encrypt=Yes;"
 
         if timeout is not None:
             conn_str += f"Connect Timeout={timeout};"
@@ -63,7 +81,7 @@ class ConnectWarehouse:
         self, sql: Union[str, List[str]]
     ) -> Union[List[pd.DataFrame], pd.DataFrame, None]:
         """
-        Runs a SQL or T-SQL query (or multiple queries) against a Fabric Warehouse.
+        Runs a SQL or T-SQL query (or multiple queries) against a Fabric Warehouse/Lakehouse.
 
         Parameters
         ----------
@@ -76,10 +94,10 @@ class ConnectWarehouse:
         A list of pandas DataFrames if multiple SQL queries return results,
         a single DataFrame if one query is executed and returns results, or None.
         """
+
         cursor = None
-        results = []
+        results = []
 
-        # If the input is a single string, convert it to a list for consistency
         if isinstance(sql, str):
             sql = [sql]
 
@@ -101,10 +119,7 @@ class ConnectWarehouse:
             results.append(result)
 
         # Return results if any queries returned a result set
-        if results:
-            return results if len(results) > 1 else results[0]
-        else:
-            return None
+        return results if len(results) > 1 else (results[0] if results else None)
 
         finally:
             if cursor:
@@ -118,3 +133,33 @@ class ConnectWarehouse:
 
     def close(self):
         self.connection.close()
+
+
+class ConnectWarehouse(ConnectBase):
+    def __init__(
+        self,
+        warehouse: str,
+        workspace: Optional[Union[str, uuid.UUID]] = None,
+        timeout: Optional[int] = None,
+    ):
+        super().__init__(
+            name=warehouse,
+            workspace=workspace,
+            timeout=timeout,
+            endpoint_type="warehouse",
+        )
+
+
+class ConnectLakehouse(ConnectBase):
+    def __init__(
+        self,
+        lakehouse: str,
+        workspace: Optional[Union[str, uuid.UUID]] = None,
+        timeout: Optional[int] = None,
+    ):
+        super().__init__(
+            name=lakehouse,
+            workspace=workspace,
+            timeout=timeout,
+            endpoint_type="lakehouse",
+        )
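Taken together, the _sql.py changes fold the old ConnectWarehouse logic into a shared ConnectBase, with thin ConnectWarehouse and ConnectLakehouse subclasses that differ only in which resolver and REST path they use. A minimal usage sketch, assuming the result-returning method in the hunks above is named `query` (its name falls outside the diff context):

from sempy_labs._sql import ConnectLakehouse, ConnectWarehouse

# Warehouse: DATABASE is the warehouse name; the TDS endpoint is read
# from the item's "connectionString" property.
wh = ConnectWarehouse(warehouse="MyWarehouse", workspace="MyWorkspace", timeout=30)
try:
    # One string -> one DataFrame (or None); a list -> a list of DataFrames.
    df = wh.query("SELECT TOP 10 * FROM dbo.Sales")  # method name assumed
finally:
    wh.close()

# Lakehouse: the endpoint comes from "sqlEndpointProperties" instead.
lh = ConnectLakehouse(lakehouse="MyLakehouse", workspace="MyWorkspace")
try:
    dfs = lh.query(["SELECT 1 AS a", "SELECT 2 AS b"])  # method name assumed
finally:
    lh.close()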
sempy_labs/_vertipaq.py
CHANGED
@@ -551,13 +551,13 @@ def vertipaq_analyzer(
     df.columns = df.columns.str.replace(" ", "_")
 
     schema = {
-        "Capacity_Name":
-        "Capacity_Id":
-        "Workspace_Name":
-        "Workspace_Id":
-        "Dataset_Name":
-        "Dataset_Id":
-        "Configured_By":
+        "Capacity_Name": data_type_string,
+        "Capacity_Id": data_type_string,
+        "Workspace_Name": data_type_string,
+        "Workspace_Id": data_type_string,
+        "Dataset_Name": data_type_string,
+        "Dataset_Id": data_type_string,
+        "Configured_By": data_type_string,
     }
 
     schema.update(
@@ -566,6 +566,8 @@ def vertipaq_analyzer(
         for key, value in vertipaq_map[key_name].items()
         }
     )
+    schema["RunId"] = data_type_long
+    schema["Timestamp"] = data_type_timestamp
 
     delta_table_name = f"VertipaqAnalyzer_{obj}".lower()
     save_as_delta_table(
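The net effect is that every Vertipaq export now carries run metadata alongside the model columns. A minimal sketch of the pattern (the data_type_* values and the column stamping are assumptions for illustration; only the schema keys RunId and Timestamp appear in the diff):

import datetime
import pandas as pd

# Stand-ins for the library's data_type_* constants (assumed string aliases).
data_type_string = "string"
data_type_long = "long"
data_type_timestamp = "timestamp"

# Fixed columns plus per-run metadata, mirroring the schema built above.
schema = {"Capacity_Name": data_type_string, "Dataset_Name": data_type_string}
schema["RunId"] = data_type_long
schema["Timestamp"] = data_type_timestamp

# Stamping each row lets successive analyzer runs coexist in one Delta table.
df = pd.DataFrame({"Capacity_Name": ["Cap1"], "Dataset_Name": ["Sales"]})
df["RunId"] = 1
df["Timestamp"] = datetime.datetime.now()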
sempy_labs/admin/__init__.py
ADDED
@@ -0,0 +1,53 @@
+from sempy_labs.admin._basic_functions import (
+    assign_workspaces_to_capacity,
+    list_capacities,
+    list_tenant_settings,
+    list_capacities_delegated_tenant_settings,
+    unassign_workspaces_from_capacity,
+    list_external_data_shares,
+    revoke_external_data_share,
+    list_workspaces,
+    list_datasets,
+    list_item_access_details,
+    list_access_entities,
+    list_workspace_access_details,
+    list_items,
+)
+from sempy_labs.admin._domains import (
+    list_domains,
+    list_domain_workspaces,
+    assign_domain_workspaces,
+    assign_domain_workspaces_by_capacities,
+    create_domain,
+    update_domain,
+    delete_domain,
+    resolve_domain_id,
+    unassign_domain_workspaces,
+    unassign_all_domain_workspaces,
+)
+
+__all__ = [
+    "list_items",
+    "list_workspace_access_details",
+    "list_access_entities",
+    "list_item_access_details",
+    "list_datasets",
+    "list_workspaces",
+    "assign_workspaces_to_capacity",
+    "list_capacities",
+    "list_tenant_settings",
+    "list_domains",
+    "list_domain_workspaces",
+    "assign_domain_workspaces",
+    "assign_domain_workspaces_by_capacities",
+    "create_domain",
+    "update_domain",
+    "delete_domain",
+    "resolve_domain_id",
+    "unassign_domain_workspaces",
+    "unassign_all_domain_workspaces",
+    "list_capacities_delegated_tenant_settings",
+    "unassign_workspaces_from_capacity",
+    "list_external_data_shares",
+    "revoke_external_data_share",
+]
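Because the new subpackage re-exports everything through __all__, the tenant-admin helpers can be imported straight from sempy_labs.admin. A short sketch (function signatures are assumptions based only on the exported names; the domain and workspace names are placeholders):

from sempy_labs.admin import (
    assign_domain_workspaces,
    list_capacities,
    list_domains,
)

# Enumerate capacities and domains visible to a tenant admin.
capacities = list_capacities()
domains = list_domains()
print(capacities, domains)

# Attach workspaces to a domain; parameter names here are illustrative guesses.
assign_domain_workspaces("Finance", workspaces=["Sales Workspace"])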