semantic-link-labs 0.11.3__py3-none-any.whl → 0.12.1__py3-none-any.whl
This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release.
This version of semantic-link-labs might be problematic.
- {semantic_link_labs-0.11.3.dist-info → semantic_link_labs-0.12.1.dist-info}/METADATA +6 -4
- {semantic_link_labs-0.11.3.dist-info → semantic_link_labs-0.12.1.dist-info}/RECORD +75 -73
- sempy_labs/__init__.py +6 -0
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_authentication.py +85 -32
- sempy_labs/_capacities.py +2 -2
- sempy_labs/_capacity_migration.py +4 -4
- sempy_labs/_clear_cache.py +1 -1
- sempy_labs/_connections.py +107 -70
- sempy_labs/_dashboards.py +6 -2
- sempy_labs/_data_pipelines.py +1 -1
- sempy_labs/_dataflows.py +1 -1
- sempy_labs/_dax.py +3 -3
- sempy_labs/_delta_analyzer.py +4 -4
- sempy_labs/_delta_analyzer_history.py +1 -1
- sempy_labs/_deployment_pipelines.py +1 -1
- sempy_labs/_environments.py +1 -1
- sempy_labs/_eventhouses.py +9 -3
- sempy_labs/_eventstreams.py +1 -1
- sempy_labs/_external_data_shares.py +1 -1
- sempy_labs/_gateways.py +14 -7
- sempy_labs/_generate_semantic_model.py +7 -12
- sempy_labs/_git.py +1 -1
- sempy_labs/_graphQL.py +1 -1
- sempy_labs/_helper_functions.py +161 -54
- sempy_labs/_job_scheduler.py +12 -1
- sempy_labs/_kql_databases.py +1 -1
- sempy_labs/_kql_querysets.py +10 -2
- sempy_labs/_kusto.py +2 -2
- sempy_labs/_list_functions.py +1 -1
- sempy_labs/_managed_private_endpoints.py +1 -1
- sempy_labs/_mirrored_databases.py +40 -16
- sempy_labs/_mirrored_warehouses.py +1 -1
- sempy_labs/_ml_experiments.py +1 -1
- sempy_labs/_model_bpa.py +6 -6
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +1 -1
- sempy_labs/_mounted_data_factories.py +3 -3
- sempy_labs/_notebooks.py +2 -1
- sempy_labs/_query_scale_out.py +2 -2
- sempy_labs/_refresh_semantic_model.py +1 -1
- sempy_labs/_semantic_models.py +15 -3
- sempy_labs/_spark.py +1 -1
- sempy_labs/_sql.py +3 -3
- sempy_labs/_sql_endpoints.py +5 -3
- sempy_labs/_sqldatabase.py +5 -1
- sempy_labs/_tags.py +3 -1
- sempy_labs/_translations.py +7 -360
- sempy_labs/_user_delegation_key.py +2 -2
- sempy_labs/_utils.py +27 -0
- sempy_labs/_vertipaq.py +3 -3
- sempy_labs/_vpax.py +1 -1
- sempy_labs/_warehouses.py +5 -0
- sempy_labs/_workloads.py +1 -1
- sempy_labs/_workspace_identity.py +1 -1
- sempy_labs/_workspaces.py +145 -11
- sempy_labs/admin/__init__.py +6 -0
- sempy_labs/admin/_capacities.py +37 -14
- sempy_labs/admin/_items.py +2 -2
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/directlake/_dl_helper.py +1 -1
- sempy_labs/lakehouse/__init__.py +4 -0
- sempy_labs/lakehouse/_materialized_lake_views.py +76 -0
- sempy_labs/lakehouse/_shortcuts.py +8 -2
- sempy_labs/report/_download_report.py +4 -1
- sempy_labs/report/_export_report.py +12 -5
- sempy_labs/report/_generate_report.py +11 -3
- sempy_labs/report/_paginated.py +21 -15
- sempy_labs/report/_report_functions.py +19 -11
- sempy_labs/report/_report_rebind.py +56 -33
- sempy_labs/theme/_org_themes.py +5 -6
- sempy_labs/tom/_model.py +5 -16
- {semantic_link_labs-0.11.3.dist-info → semantic_link_labs-0.12.1.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.11.3.dist-info → semantic_link_labs-0.12.1.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.11.3.dist-info → semantic_link_labs-0.12.1.dist-info}/top_level.txt +0 -0
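As a quick sanity check before relying on any of the behavior changes below, you can confirm which wheel is actually installed; a minimal sketch:

import importlib.metadata

# The distribution name uses hyphens even though the import package is sempy_labs.
print(importlib.metadata.version("semantic-link-labs"))  # expect '0.12.1' after upgrading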
sempy_labs/_model_bpa.py
CHANGED
@@ -3,8 +3,8 @@ import pandas as pd
 import warnings
 import datetime
 from IPython.display import display, HTML
-from ._model_dependencies import get_model_calc_dependencies
-from ._helper_functions import (
+from sempy_labs._model_dependencies import get_model_calc_dependencies
+from sempy_labs._helper_functions import (
     format_dax_object_name,
     create_relationship_name,
     save_as_delta_table,
@@ -15,9 +15,9 @@ from ._helper_functions import (
     resolve_workspace_name_and_id,
     _create_spark_session,
 )
-from .lakehouse import get_lakehouse_tables, lakehouse_attached
-from .tom import connect_semantic_model
-from ._model_bpa_rules import model_bpa_rules
+from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
+from sempy_labs.tom import connect_semantic_model
+from sempy_labs._model_bpa_rules import model_bpa_rules
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
@@ -391,7 +391,7 @@ def run_model_bpa(
         lakeT = get_lakehouse_tables()
         lakeT_filt = lakeT[lakeT["Table Name"] == delta_table_name]

-        dfExport["Severity"].replace(icons.severity_mapping
+        dfExport["Severity"] = dfExport["Severity"].replace(icons.severity_mapping)

         if len(lakeT_filt) == 0:
             runId = 1
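The last hunk replaces a truncated in-place style call with an explicit assignment. Under copy-on-write in newer pandas, replacing values on a selected column in place may not write back to the DataFrame, so assigning the result of Series.replace back to the column is the robust form. A self-contained sketch of the same pattern (the severity mapping here is a made-up stand-in for icons.severity_mapping):

import pandas as pd

# Hypothetical stand-in for icons.severity_mapping in semantic-link-labs.
severity_mapping = {"Warning": "Medium", "Error": "High", "Info": "Low"}

dfExport = pd.DataFrame(
    {"Rule Name": ["Rule A", "Rule B"], "Severity": ["Warning", "Error"]}
)

# Assign the replaced Series back to the column instead of mutating in place;
# this avoids the chained-assignment / copy-on-write pitfalls in pandas 2.x+.
dfExport["Severity"] = dfExport["Severity"].replace(severity_mapping)
print(dfExport)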
sempy_labs/_model_bpa_bulk.py
CHANGED
@@ -1,7 +1,7 @@
 import sempy.fabric as fabric
 import pandas as pd
 import datetime
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     save_as_delta_table,
     resolve_workspace_capacity,
     retry,
@@ -9,11 +9,11 @@ from ._helper_functions import (
     resolve_workspace_id,
     resolve_lakehouse_name_and_id,
 )
-from .lakehouse import (
+from sempy_labs.lakehouse import (
     get_lakehouse_tables,
     lakehouse_attached,
 )
-from ._model_bpa import run_model_bpa
+from sempy_labs._model_bpa import run_model_bpa
 from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons
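A recurring change throughout this release is the switch from package-relative imports (from ._helper_functions import ...) to absolute imports (from sempy_labs._helper_functions import ...). Both bind the same module once sempy_labs is installed; the absolute form is simply unambiguous about which package a name comes from and survives being copied into notebooks or scripts outside the package. A tiny sketch, assuming semantic-link-labs is installed in the environment:

import importlib

# Both spellings resolve to the same cached module object; 0.12.x standardizes
# on the absolute one across sempy_labs.
importlib.import_module("sempy_labs")
mod_absolute = importlib.import_module("sempy_labs._helper_functions")
mod_relative_style = importlib.import_module("._helper_functions", package="sempy_labs")
assert mod_absolute is mod_relative_style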
sempy_labs/_mounted_data_factories.py
CHANGED

@@ -1,11 +1,11 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
     _update_dataframe_datatypes,
-
+    _get_item_definition,
     delete_item,
 )

@@ -91,7 +91,7 @@ def get_mounted_data_factory_definition(
         The 'mountedDataFactory-content.json' file from the mounted data factory definition.
     """

-    return
+    return _get_item_definition(
         item=mounted_data_factory,
         type="MountedDataFactory",
         workspace=workspace,
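The second hunk rewires the public getter onto the shared _get_item_definition helper. A usage sketch of the public function, with placeholder item and workspace names (per the docstring, the return value is the 'mountedDataFactory-content.json' definition part):

from sempy_labs._mounted_data_factories import get_mounted_data_factory_definition

# Placeholder names; resolves the item in the given workspace and returns its definition.
definition = get_mounted_data_factory_definition(
    mounted_data_factory="MyMountedADF",
    workspace="My Workspace",
)
print(definition)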
sempy_labs/_notebooks.py
CHANGED
@@ -17,6 +17,7 @@ from sempy_labs._helper_functions import (
 from sempy.fabric.exceptions import FabricHTTPException
 from os import PathLike
 from uuid import UUID
+import os

 _notebook_prefix = "notebook-content."

@@ -315,7 +316,7 @@ def list_notebooks(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     Returns
     -------
     pandas.DataFrame
-        A pandas dataframe showing the
+        A pandas dataframe showing the notebooks within a workspace.
     """

     columns = {
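The function whose docstring is completed above is a plain listing call; a minimal usage sketch (the workspace name is illustrative):

from sempy_labs import list_notebooks

# Returns a pandas DataFrame of the notebooks in the given workspace;
# when omitted, the workspace typically defaults to the attached one.
df = list_notebooks(workspace="My Workspace")
print(df.head())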
sempy_labs/_query_scale_out.py
CHANGED
@@ -1,6 +1,6 @@
 import sempy.fabric as fabric
 import pandas as pd
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     _update_dataframe_datatypes,
@@ -406,7 +406,7 @@ def set_workspace_default_storage_format(
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     # Check current storage format
-    dfW = fabric.list_workspaces(filter=f"
+    dfW = fabric.list_workspaces(filter=f"id eq '{workspace_id}'")
     if len(dfW) == 0:
         raise ValueError()
     current_storage_format = dfW["Default Dataset Storage Format"].iloc[0]
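The new call filters fabric.list_workspaces by workspace id rather than the old (truncated) filter, which keeps the lookup unambiguous when two workspaces share a display name. A short sketch of the same lookup, assuming a Fabric notebook session where sempy is available (the id is a placeholder):

import sempy.fabric as fabric

workspace_id = "00000000-0000-0000-0000-000000000000"  # placeholder

# OData-style filter: fetch exactly the one workspace with this id.
dfW = fabric.list_workspaces(filter=f"id eq '{workspace_id}'")
if dfW.empty:
    raise ValueError(f"Workspace '{workspace_id}' was not found.")

current_storage_format = dfW["Default Dataset Storage Format"].iloc[0]
print(current_storage_format)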
sempy_labs/_semantic_models.py
CHANGED
@@ -1,7 +1,7 @@
 from uuid import UUID
 from typing import Optional, List
 import pandas as pd
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _create_dataframe,
     _base_api,
     _update_dataframe_datatypes,
@@ -23,6 +23,8 @@ def get_semantic_model_refresh_schedule(
     """
     Gets the refresh schedule for the specified dataset from the specified workspace.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     dataset : str | uuid.UUID
@@ -38,7 +40,7 @@ def get_semantic_model_refresh_schedule(
         Shows the refresh schedule for the specified dataset from the specified workspace.
     """

-
+    workspace_id = resolve_workspace_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace)

     columns = {
@@ -60,7 +62,8 @@ def get_semantic_model_refresh_schedule(
     df = _create_dataframe(columns)

     result = _base_api(
-        request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshSchedule"
+        request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshSchedule",
+        client="fabric_sp",
     ).json()

     df = (
@@ -83,6 +86,8 @@ def enable_semantic_model_scheduled_refresh(
     """
     Enables the scheduled refresh for the specified dataset from the specified workspace.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     dataset : str | uuid.UUID
@@ -117,6 +122,7 @@ def enable_semantic_model_scheduled_refresh(
         request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshSchedule",
         method="patch",
         payload=payload,
+        client="fabric_sp",
     )

     print(
@@ -131,6 +137,8 @@ def delete_semantic_model(dataset: str | UUID, workspace: Optional[str | UUID] =

     This is a wrapper function for the following API: `Items - Delete Semantic Model <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/delete-semantic-model>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     dataset: str | uuid.UUID
@@ -157,6 +165,8 @@ def update_semantic_model_refresh_schedule(

     This is a wrapper function for the following API: `Datasets - Update Refresh Schedule In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/update-refresh-schedule-in-group>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     dataset : str | uuid.UUID
@@ -247,6 +257,8 @@ def list_semantic_model_datasources(

     This is a wrapper function for the following API: `Datasets - Get Datasources In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/get-datasources-in-group>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     dataset : str | uuid.UUID
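Several of the docstring additions above advertise Service Principal Authentication for these Power BI / Fabric wrappers (the calls now pass client="fabric_sp" to _base_api). One way a caller typically opts in is the library's service-principal context manager; the sketch below is illustrative, and the keyword names and Key Vault indirection should be checked against the linked Service Principal notebook:

import sempy_labs as labs

# Illustrative only: credentials are assumed to live in Azure Key Vault and the
# keyword names mirror the Service Principal notebook referenced in the docstrings.
with labs.service_principal_authentication(
    key_vault_uri="https://mykeyvault.vault.azure.net/",  # placeholder
    key_vault_tenant_id="tenant-id-secret-name",          # placeholder secret names
    key_vault_client_id="client-id-secret-name",
    key_vault_client_secret="client-secret-secret-name",
):
    # Inside the context, supported functions authenticate as the service principal.
    schedule = labs.get_semantic_model_refresh_schedule(
        dataset="Sales Model", workspace="Sales Workspace"
    )
    print(schedule)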
sempy_labs/_spark.py
CHANGED
sempy_labs/_sql.py
CHANGED
@@ -3,7 +3,7 @@ from typing import Optional, Union, List
 from sempy._utils._log import log
 import struct
 from itertools import chain, repeat
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_lakehouse_name_and_id,
     resolve_item_name_and_id,
     resolve_workspace_name_and_id,
@@ -39,7 +39,7 @@ class ConnectBase:
         timeout: Optional[int] = None,
         endpoint_type: str = "warehouse",
     ):
-        from sempy.fabric.
+        from sempy.fabric._credentials import get_access_token
         import pyodbc

         (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -82,7 +82,7 @@ class ConnectBase:
         )

         # Set up the connection string
-        access_token =
+        access_token = get_access_token("sql").token
         tokenstruct = _bytes2mswin_bstr(access_token.encode())
         if endpoint_type == "sqldatabase":
             conn_str = f"DRIVER={{ODBC Driver 18 for SQL Server}};SERVER={tds_endpoint};DATABASE={resource_name}-{resource_id};Encrypt=Yes;"
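For context, the token handed to pyodbc here has to be packed into the ODBC access-token structure before connecting. The sketch below shows the generic Entra ID access-token pattern for pyodbc, assuming an already-acquired token string; it approximates what _bytes2mswin_bstr plus the connection setup do, not the module's exact code:

import struct
import pyodbc

def connect_with_token(conn_str: str, access_token: str) -> pyodbc.Connection:
    # ODBC expects the token as UTF-16-LE bytes, prefixed with its byte length.
    token_bytes = access_token.encode("utf-16-le")
    token_struct = struct.pack(f"<I{len(token_bytes)}s", len(token_bytes), token_bytes)

    SQL_COPT_SS_ACCESS_TOKEN = 1256  # msodbcsql pre-connect attribute for AAD/Entra tokens
    return pyodbc.connect(conn_str, attrs_before={SQL_COPT_SS_ACCESS_TOKEN: token_struct})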
sempy_labs/_sql_endpoints.py
CHANGED
@@ -1,7 +1,7 @@
 from typing import Optional, Literal
 from uuid import UUID
 import pandas as pd
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _base_api,
     _create_dataframe,
     resolve_workspace_name_and_id,
@@ -73,6 +73,8 @@ def refresh_sql_endpoint_metadata(

     This is a wrapper function for the following API: `Items - Refresh Sql Endpoint Metadata <https://learn.microsoft.com/rest/api/fabric/sqlendpoint/items/refresh-sql-endpoint-metadata>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     item : str | uuid.UUID
@@ -140,7 +142,7 @@ def refresh_sql_endpoint_metadata(
     }

     result = _base_api(
-        request=f"v1/workspaces/{workspace_id}/sqlEndpoints/{sql_endpoint_id}/refreshMetadata
+        request=f"v1/workspaces/{workspace_id}/sqlEndpoints/{sql_endpoint_id}/refreshMetadata",
         method="post",
         client="fabric_sp",
         status_codes=[200, 202],
@@ -159,7 +161,7 @@ def refresh_sql_endpoint_metadata(
     }

     if result:
-        df = pd.json_normalize(result)
+        df = pd.json_normalize(result.get("value"))

         # Extract error code and message, set to None if no error
         df["Error Code"] = df.get("error.errorCode", None)
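The last hunk flattens the "value" array of the response rather than the whole envelope, so each table's refresh status becomes one row. A self-contained sketch with a made-up payload shaped like such a response (field names are illustrative, not the exact API contract):

import pandas as pd

# Hypothetical response envelope: the useful rows live under "value".
result = {
    "value": [
        {"tableName": "DimDate", "status": "Success"},
        {"tableName": "FactSales", "status": "Failure",
         "error": {"errorCode": "TableLocked", "message": "Table is locked."}},
    ]
}

# Normalizing the whole dict yields a single row with a nested list column;
# normalizing result.get("value") yields one row per table with dotted columns.
df = pd.json_normalize(result.get("value"))
df["Error Code"] = df.get("error.errorCode", None)
print(df)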
sempy_labs/_sqldatabase.py
CHANGED
@@ -1,4 +1,4 @@
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
@@ -21,6 +21,8 @@ def create_sql_database(

     This is a wrapper function for the following API: `Items - Create SQL Database <https://learn.microsoft.com/rest/api/fabric/sqldatabase/items/create-sql-database>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name: str
@@ -47,6 +49,8 @@ def delete_sql_database(

     This is a wrapper function for the following API: `Items - Delete SQL Database <https://learn.microsoft.com/rest/api/fabric/sqldatabase/items/delete-sql-database>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     sql_database: str | uuid.UUID
sempy_labs/_tags.py
CHANGED
@@ -1,4 +1,4 @@
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _base_api,
     _create_dataframe,
     _update_dataframe_datatypes,
@@ -62,6 +62,8 @@ def resolve_tags(tags: str | List[str]) -> List[str]:
     """
     Resolves the tags to a list of strings.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     tags : str | List[str]