semantic-link-labs 0.11.1-py3-none-any.whl → 0.11.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of semantic-link-labs might be problematic.
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/METADATA +7 -6
- semantic_link_labs-0.11.3.dist-info/RECORD +212 -0
- sempy_labs/__init__.py +65 -71
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_ai.py +1 -1
- sempy_labs/_capacities.py +2 -2
- sempy_labs/_capacity_migration.py +5 -5
- sempy_labs/_clear_cache.py +1 -1
- sempy_labs/_connections.py +2 -2
- sempy_labs/_dashboards.py +16 -16
- sempy_labs/_data_pipelines.py +1 -1
- sempy_labs/_dataflows.py +101 -26
- sempy_labs/_dax.py +3 -3
- sempy_labs/_delta_analyzer.py +4 -4
- sempy_labs/_delta_analyzer_history.py +1 -1
- sempy_labs/_deployment_pipelines.py +1 -1
- sempy_labs/_environments.py +22 -21
- sempy_labs/_eventhouses.py +12 -11
- sempy_labs/_eventstreams.py +12 -11
- sempy_labs/_external_data_shares.py +78 -23
- sempy_labs/_gateways.py +47 -45
- sempy_labs/_generate_semantic_model.py +3 -3
- sempy_labs/_git.py +1 -1
- sempy_labs/_graphQL.py +12 -11
- sempy_labs/_helper_functions.py +169 -5
- sempy_labs/_job_scheduler.py +56 -54
- sempy_labs/_kql_databases.py +16 -17
- sempy_labs/_kql_querysets.py +12 -11
- sempy_labs/_kusto.py +2 -2
- sempy_labs/_labels.py +126 -0
- sempy_labs/_list_functions.py +2 -2
- sempy_labs/_managed_private_endpoints.py +18 -15
- sempy_labs/_mirrored_databases.py +16 -15
- sempy_labs/_mirrored_warehouses.py +12 -11
- sempy_labs/_ml_experiments.py +11 -10
- sempy_labs/_model_auto_build.py +3 -3
- sempy_labs/_model_bpa.py +5 -5
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +1 -1
- sempy_labs/_mounted_data_factories.py +12 -12
- sempy_labs/_notebooks.py +151 -2
- sempy_labs/_one_lake_integration.py +1 -1
- sempy_labs/_query_scale_out.py +1 -1
- sempy_labs/_refresh_semantic_model.py +1 -1
- sempy_labs/_semantic_models.py +30 -28
- sempy_labs/_spark.py +1 -1
- sempy_labs/_sql.py +1 -1
- sempy_labs/_sql_endpoints.py +12 -11
- sempy_labs/_sqldatabase.py +15 -15
- sempy_labs/_tags.py +11 -10
- sempy_labs/_translations.py +1 -1
- sempy_labs/_user_delegation_key.py +2 -2
- sempy_labs/_vertipaq.py +3 -3
- sempy_labs/_vpax.py +1 -1
- sempy_labs/_warehouses.py +15 -14
- sempy_labs/_workloads.py +1 -1
- sempy_labs/_workspace_identity.py +1 -1
- sempy_labs/_workspaces.py +14 -13
- sempy_labs/admin/__init__.py +18 -18
- sempy_labs/admin/_activities.py +46 -46
- sempy_labs/admin/_apps.py +28 -26
- sempy_labs/admin/_artifacts.py +15 -15
- sempy_labs/admin/_basic_functions.py +1 -2
- sempy_labs/admin/_capacities.py +84 -82
- sempy_labs/admin/_dataflows.py +2 -2
- sempy_labs/admin/_datasets.py +50 -48
- sempy_labs/admin/_domains.py +25 -19
- sempy_labs/admin/_external_data_share.py +24 -22
- sempy_labs/admin/_git.py +17 -17
- sempy_labs/admin/_items.py +47 -45
- sempy_labs/admin/_reports.py +61 -58
- sempy_labs/admin/_scanner.py +2 -2
- sempy_labs/admin/_shared.py +18 -18
- sempy_labs/admin/_tags.py +2 -2
- sempy_labs/admin/_tenant.py +57 -51
- sempy_labs/admin/_users.py +16 -15
- sempy_labs/admin/_workspaces.py +2 -2
- sempy_labs/directlake/__init__.py +12 -12
- sempy_labs/directlake/_directlake_schema_compare.py +3 -3
- sempy_labs/directlake/_directlake_schema_sync.py +9 -7
- sempy_labs/directlake/_dl_helper.py +5 -2
- sempy_labs/directlake/_generate_shared_expression.py +1 -1
- sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
- sempy_labs/directlake/_guardrails.py +1 -1
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +3 -3
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -1
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -3
- sempy_labs/directlake/_update_directlake_partition_entity.py +4 -4
- sempy_labs/directlake/_warm_cache.py +3 -3
- sempy_labs/graph/__init__.py +3 -3
- sempy_labs/graph/_groups.py +81 -78
- sempy_labs/graph/_teams.py +21 -21
- sempy_labs/graph/_users.py +109 -10
- sempy_labs/lakehouse/__init__.py +7 -7
- sempy_labs/lakehouse/_blobs.py +30 -30
- sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
- sempy_labs/lakehouse/_get_lakehouse_tables.py +29 -27
- sempy_labs/lakehouse/_helper.py +38 -1
- sempy_labs/lakehouse/_lakehouse.py +16 -7
- sempy_labs/lakehouse/_livy_sessions.py +47 -42
- sempy_labs/lakehouse/_shortcuts.py +22 -21
- sempy_labs/migration/__init__.py +8 -8
- sempy_labs/migration/_create_pqt_file.py +2 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +35 -44
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +9 -20
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +5 -9
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +11 -20
- sempy_labs/migration/_migration_validation.py +1 -2
- sempy_labs/migration/_refresh_calc_tables.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +40 -40
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +1 -1
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/__init__.py +10 -10
- sempy_labs/report/_download_report.py +2 -2
- sempy_labs/report/_export_report.py +2 -2
- sempy_labs/report/_generate_report.py +1 -1
- sempy_labs/report/_paginated.py +1 -1
- sempy_labs/report/_report_bpa.py +4 -3
- sempy_labs/report/_report_functions.py +3 -3
- sempy_labs/report/_report_list_functions.py +3 -3
- sempy_labs/report/_report_rebind.py +1 -1
- sempy_labs/report/_reportwrapper.py +248 -250
- sempy_labs/report/_save_report.py +3 -3
- sempy_labs/theme/_org_themes.py +19 -6
- sempy_labs/tom/__init__.py +1 -1
- sempy_labs/tom/_model.py +13 -8
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- semantic_link_labs-0.11.1.dist-info/RECORD +0 -210
- sempy_labs/_dax_query_view.py +0 -57
- sempy_labs/_ml_models.py +0 -110
- sempy_labs/_variable_libraries.py +0 -91
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/top_level.txt +0 -0
sempy_labs/_job_scheduler.py
CHANGED

@@ -1,7 +1,7 @@
 from sempy._utils._log import log
 import pandas as pd
 from typing import Optional, List
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_item_name_and_id,
     _update_dataframe_datatypes,
@@ -67,27 +67,28 @@ def list_item_job_instances(
     if not responses[0].get("value"):
         return df

-… [1 removed line; content truncated in the source diff]
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             fail = v.get("failureReason", {})
-… [14 removed lines; content truncated in the source diff]
+            rows.append(
+                {
+                    "Job Instance Id": v.get("id"),
+                    "Item Name": item_name,
+                    "Item Id": v.get("itemId"),
+                    "Item Type": type,
+                    "Job Type": v.get("jobType"),
+                    "Invoke Type": v.get("invokeType"),
+                    "Status": v.get("status"),
+                    "Root Activity Id": v.get("rootActivityId"),
+                    "Start Time UTC": v.get("startTimeUtc"),
+                    "End Time UTC": v.get("endTimeUtc"),
+                    "Error Message": fail.get("message") if fail is not None else "",
+                }
+            )

-    if …
-    df = pd.…
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
         _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
@@ -111,24 +112,25 @@ def _get_item_job_instance(url: str) -> pd.DataFrame:

     response = _base_api(request=url)

-… [1 removed line; content truncated in the source diff]
+    rows = []
     for v in response.json().get("value", []):
         fail = v.get("failureReason", {})
-… [15 removed lines; content truncated in the source diff]
+        rows.append(
+            {
+                "Job Instance Id": v.get("id"),
+                "Item Id": v.get("itemId"),
+                "Job Type": v.get("jobType"),
+                "Invoke Type": v.get("invokeType"),
+                "Status": v.get("status"),
+                "Root Activity Id": v.get("rootActivityId"),
+                "Start Time UTC": v.get("startTimeUtc"),
+                "End Time UTC": v.get("endTimeUtc"),
+                "Error Message": fail.get("message") if fail is not None else "",
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
         _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
@@ -190,29 +192,29 @@ def list_item_schedules(
         request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules"
     )

-… [1 removed line; content truncated in the source diff]
+    rows = []
     for v in response.json().get("value", []):
         config = v.get("configuration", {})
         own = v.get("owner", {})
-… [17 removed lines; content truncated in the source diff]
-    if …
-    df = pd.…
+        rows.append(
+            {
+                "Job Schedule Id": v.get("id"),
+                "Enabled": v.get("enabled"),
+                "Created Date Time": v.get("createdDateTime"),
+                "Start Date Time": config.get("startDateTime"),
+                "End Date Time": config.get("endDateTime"),
+                "Local Time Zone Id": config.get("localTimeZoneId"),
+                "Type": config.get("type"),
+                "Interval": config.get("interval"),
+                "Weekdays": config.get("weekdays"),
+                "Times": config.get("times"),
+                "Owner Id": own.get("id"),
+                "Owner Type": own.get("type"),
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
         _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
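The three hunks above all make the same change, and it repeats across most of the list_* modules in this release: instead of growing the result DataFrame inside the loop, the new code collects plain dicts in a rows list and builds the DataFrame once at the end. A minimal sketch of the two shapes with illustrative column names; the "before" variant is an assumption, since the removed lines are truncated in this diff:

import pandas as pd

values = [
    {"id": "a1", "status": "Completed"},
    {"id": "b2", "status": "Failed"},
]

# Before (assumed shape): growing a DataFrame with pd.concat inside the loop
# copies every existing row on each iteration, so building n rows costs O(n^2).
df_old = pd.DataFrame(columns=["Job Instance Id", "Status"])
for v in values:
    new_data = {"Job Instance Id": v.get("id"), "Status": v.get("status")}
    df_old = pd.concat([df_old, pd.DataFrame(new_data, index=[0])], ignore_index=True)

# After (the shape used in the hunks above): collect dicts, build the frame once.
rows = []
for v in values:
    rows.append({"Job Instance Id": v.get("id"), "Status": v.get("status")})

if rows:
    df_new = pd.DataFrame(rows, columns=["Job Instance Id", "Status"])
    print(df_new)

The "if rows:" guard appears to leave df as the pre-built empty frame with its declared columns when the API returns no items.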
sempy_labs/_kql_databases.py
CHANGED

@@ -1,7 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
-    resolve_workspace_name_and_id,
+from ._helper_functions import (
     _base_api,
     _create_dataframe,
     delete_item,
@@ -55,24 +54,24 @@ def list_kql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         client="fabric_sp",
     )

-… [1 removed line; content truncated in the source diff]
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             prop = v.get("properties", {})
-… [12 removed lines; content truncated in the source diff]
-    if …
-    df = pd.…
+            rows.append(
+                {
+                    "KQL Database Name": v.get("displayName"),
+                    "KQL Database Id": v.get("id"),
+                    "Description": v.get("description"),
+                    "Parent Eventhouse Item Id": prop.get("parentEventhouseItemId"),
+                    "Query Service URI": prop.get("queryServiceUri"),
+                    "Ingestion Service URI": prop.get("ingestionServiceUri"),
+                    "Database Type": prop.get("databaseType"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

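For reference, a minimal usage sketch of the refactored function; the workspace name is a placeholder, and the selected columns are the ones built in the hunk above:

from sempy_labs import list_kql_databases

# "Sales" is a placeholder; omit the argument to resolve the workspace from
# the attached lakehouse or the notebook context.
df = list_kql_databases(workspace="Sales")
print(df[["KQL Database Name", "Database Type"]])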
sempy_labs/_kql_querysets.py
CHANGED

@@ -1,7 +1,7 @@
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
@@ -45,18 +45,19 @@ def list_kql_querysets(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         request=f"v1/workspaces/{workspace_id}/kqlQuerysets", uses_pagination=True
     )

-… [1 removed line; content truncated in the source diff]
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-… [9 removed lines; content truncated in the source diff]
+            rows.append(
+                {
+                    "KQL Queryset Name": v.get("displayName"),
+                    "KQL Queryset Id": v.get("id"),
+                    "Description": v.get("description"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

sempy_labs/_kusto.py
CHANGED

@@ -5,8 +5,8 @@ from sempy._utils._log import log
 import sempy_labs._icons as icons
 from typing import Optional
 from uuid import UUID
-from sempy_labs._kql_databases import _resolve_cluster_uri
-from sempy_labs._helper_functions import resolve_item_id
+from ._kql_databases import _resolve_cluster_uri
+from ._helper_functions import resolve_item_id


 @log
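_kusto.py shows this release's most widespread change in its purest form: the many +1 -1 entries in the file list above are absolute sempy_labs.* imports rewritten as package-relative imports, with no behavioral change. A small sketch demonstrating the equivalence via importlib, assuming semantic-link-labs is installed:

import importlib

# The absolute form (0.11.1 style) and the relative form (0.11.3 style)
# resolve to the same module object.
absolute = importlib.import_module("sempy_labs._helper_functions")
relative = importlib.import_module("._helper_functions", package="sempy_labs")
print(absolute is relative)  # True

Relative imports keep intra-package references independent of the top-level distribution name, which helps if the package is ever vendored or renamed.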
sempy_labs/_labels.py
ADDED

@@ -0,0 +1,126 @@
+import sempy.fabric as fabric
+import requests
+import pandas as pd
+from typing import Optional, Union
+from uuid import UUID
+from sempy.fabric.exceptions import FabricHTTPException
+from sempy._utils._log import log
+
+
+@log
+def list_item_labels(workspace: Optional[Union[str, UUID]] = None) -> pd.DataFrame:
+    """
+    Lists all items within a workspace and shows their sensitivity labels.
+
+    NOTE: This function uses an internal API and is subject to change/break without notice.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of all items within a workspace and their sensitivity labels.
+    """
+
+    import notebookutils
+
+    token = notebookutils.credentials.getToken("pbi")
+    headers = {"Authorization": f"Bearer {token}"}
+
+    # Item types handled in special payload fields
+    grouped_types = {
+        "dashboards": "Dashboard",
+        "reports": "Report",
+        "models": "SemanticModel",
+        "dataflows": "Dataflow",
+        "datamarts": "Datamart",
+    }
+
+    # All other item types go into 'artifacts'
+    fabric_items = [
+        "Datamart",
+        "Lakehouse",
+        "Eventhouse",
+        "Environment",
+        "KQLDatabase",
+        "KQLQueryset",
+        "KQLDashboard",
+        "DataPipeline",
+        "Notebook",
+        "SparkJobDefinition",
+        "MLExperiment",
+        "MLModel",
+        "Warehouse",
+        "Eventstream",
+        "SQLEndpoint",
+        "MirroredWarehouse",
+        "MirroredDatabase",
+        "Reflex",
+        "GraphQLApi",
+        "MountedDataFactory",
+        "SQLDatabase",
+        "CopyJob",
+        "VariableLibrary",
+        "Dataflow",
+        "ApacheAirflowJob",
+        "WarehouseSnapshot",
+        "DigitalTwinBuilder",
+        "DigitalTwinBuilderFlow",
+        "MirroredAzureDatabricksCatalog",
+        "DataAgent",
+        "UserDataFunction",
+    ]
+
+    dfI = fabric.list_items(workspace=workspace)
+
+    payload = {
+        key: [{"artifactId": i} for i in dfI[dfI["Type"] == value]["Id"].tolist()]
+        for key, value in grouped_types.items()
+    }
+
+    # Add generic artifact types
+    artifact_ids = dfI[dfI["Type"].isin(fabric_items)]["Id"].tolist()
+    if artifact_ids:
+        payload["artifacts"] = [{"artifactId": i} for i in artifact_ids]
+
+    client = fabric.PowerBIRestClient()
+    response = client.get("/v1.0/myorg/capacities")
+    if response.status_code != 200:
+        raise FabricHTTPException("Failed to retrieve URL prefix.")
+    context = response.json().get("@odata.context")
+    prefix = context.split("/v1.0")[0]
+
+    response = requests.post(
+        f"{prefix}/metadata/informationProtection/artifacts",
+        json=payload,
+        headers=headers,
+    )
+    if response.status_code != 200:
+        raise FabricHTTPException(f"Failed to retrieve labels: {response.text}")
+    result = response.json()
+
+    label_keys = [
+        "artifactInformationProtections",
+        "datasetInformationProtections",
+        "reportInformationProtections",
+        "dashboardInformationProtections",
+    ]
+
+    rows = [
+        {
+            "Id": item.get("artifactObjectId"),
+            "Label Id": item.get("labelId"),
+            "Label Name": item.get("name"),
+            "Parent Label Name": item.get("parent", {}).get("name"),
+            "Label Description": item.get("tooltip"),
+        }
+        for key in label_keys
+        for item in result.get(key, [])
+    ]
+
+    df_labels = pd.DataFrame(rows)
+    return dfI.merge(df_labels, on="Id", how="left")
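A minimal usage sketch of the new function. It acquires a token through notebookutils, so it only runs inside a Fabric notebook, and per its own docstring it relies on an internal API that may break without notice. The workspace name is a placeholder, and the selected columns assume fabric.list_items's standard "Display Name"/"Type" columns:

from sempy_labs._labels import list_item_labels

# Placeholder workspace; runs only in a Fabric notebook ("pbi" token).
df = list_item_labels(workspace="Sales")

# Left merge: items without a sensitivity label show NaN in the label columns.
print(df[["Display Name", "Type", "Label Name"]].head())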
sempy_labs/_list_functions.py
CHANGED

@@ -1,5 +1,5 @@
 import sempy.fabric as fabric
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     create_relationship_name,
     format_dax_object_name,
@@ -1131,7 +1131,7 @@ def list_reports_using_semantic_model(
     dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
-    Shows a list of all the reports …
+    Shows a list of all the reports which use a given semantic model. This is limited to the reports which are in the same workspace as the semantic model.

     Parameters
     ----------
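The docstring fix above spells out the function's scope restriction rather than leaving the sentence dangling. A minimal usage sketch; the dataset name is a placeholder:

from sempy_labs import list_reports_using_semantic_model

# "Sales Model" is a placeholder; only reports in the model's own workspace
# are returned, per the clarified docstring.
df = list_reports_using_semantic_model(dataset="Sales Model")
print(df)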
sempy_labs/_managed_private_endpoints.py
CHANGED

@@ -1,7 +1,7 @@
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     _is_valid_uuid,
     _base_api,
@@ -118,23 +118,26 @@ def list_managed_private_endpoints(
         client="fabric_sp",
     )

-… [1 removed line; content truncated in the source diff]
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             conn = v.get("connectionState", {})
-… [13 removed lines; content truncated in the source diff]
+            rows.append(
+                {
+                    "Managed Private Endpoint Name": v.get("name"),
+                    "Managed Private Endpoint Id": v.get("id"),
+                    "Target Private Link Resource Id": v.get(
+                        "targetPrivateLinkResourceId"
+                    ),
+                    "Provisioning State": v.get("provisioningState"),
+                    "Connection Status": conn.get("status"),
+                    "Connection Description": conn.get("description"),
+                    "Target Subresource Type": v.get("targetSubresourceType"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

sempy_labs/_mirrored_databases.py
CHANGED

@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     _update_dataframe_datatypes,
     _base_api,
@@ -58,25 +58,26 @@ def list_mirrored_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         client="fabric_sp",
     )

-… [1 removed line; content truncated in the source diff]
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             prop = v.get("properties", {})
             sql = prop.get("sqlEndpointProperties", {})
-… [11 removed lines; content truncated in the source diff]
+            rows.append(
+                {
+                    "Mirrored Database Name": v.get("displayName"),
+                    "Mirrored Database Id": v.get("id"),
+                    "Description": v.get("description"),
+                    "OneLake Tables Path": prop.get("oneLakeTablesPath"),
+                    "SQL Endpoint Connection String": sql.get("connectionString"),
+                    "SQL Endpoint Id": sql.get("id"),
+                    "Provisioning Status": sql.get("provisioningStatus"),
+                    "Default Schema": prop.get("defaultSchema"),
+                }
+            )

-    if …
-    df = pd.…
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

sempy_labs/_mirrored_warehouses.py
CHANGED

@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
@@ -43,17 +43,18 @@ def list_mirrored_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         uses_pagination=True,
     )

-… [1 removed line; content truncated in the source diff]
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-… [9 removed lines; content truncated in the source diff]
+            rows.append(
+                {
+                    "Mirrored Warehouse Name": v.get("displayName"),
+                    "Mirrored Warehouse Id": v.get("id"),
+                    "Description": v.get("description"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
sempy_labs/_ml_experiments.py
CHANGED

@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_id,
     _base_api,
     delete_item,
@@ -46,22 +46,23 @@ def list_ml_experiments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         uses_pagination=True,
     )

-… [1 removed line; content truncated in the source diff]
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             model_id = v.get("id")
             modelName = v.get("displayName")
             desc = v.get("description")

-… [6 removed lines; content truncated in the source diff]
+            rows.append(
+                {
+                    "ML Experiment Name": modelName,
+                    "ML Experiment Id": model_id,
+                    "Description": desc,
+                }
+            )

-    if …
-    df = pd.…
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

sempy_labs/_model_auto_build.py
CHANGED

@@ -1,8 +1,8 @@
 import sempy.fabric as fabric
 import pandas as pd
-from sempy_labs.tom import connect_semantic_model
-from sempy_labs._generate_semantic_model import create_blank_semantic_model
-from sempy_labs.directlake._generate_shared_expression import generate_shared_expression
+from .tom import connect_semantic_model
+from ._generate_semantic_model import create_blank_semantic_model
+from .directlake._generate_shared_expression import generate_shared_expression
 from typing import Optional
 from sempy._utils._log import log

sempy_labs/_model_bpa.py
CHANGED

@@ -3,8 +3,8 @@ import pandas as pd
 import warnings
 import datetime
 from IPython.display import display, HTML
-from sempy_labs._model_dependencies import get_model_calc_dependencies
-from sempy_labs._helper_functions import (
+from ._model_dependencies import get_model_calc_dependencies
+from ._helper_functions import (
     format_dax_object_name,
     create_relationship_name,
     save_as_delta_table,
@@ -15,9 +15,9 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _create_spark_session,
 )
-from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
-from sempy_labs.tom import connect_semantic_model
-from sempy_labs._model_bpa_rules import model_bpa_rules
+from .lakehouse import get_lakehouse_tables, lakehouse_attached
+from .tom import connect_semantic_model
+from ._model_bpa_rules import model_bpa_rules
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
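_model_bpa.py changes only its imports here. For context, a minimal usage sketch of the module's entry point, run_model_bpa; the dataset and workspace names are placeholders, and the return_dataframe flag is assumed from the library's documented signature:

from sempy_labs import run_model_bpa

# Placeholders for dataset/workspace; return_dataframe=True returns the Best
# Practice Analyzer violations as a DataFrame instead of rendering HTML.
df = run_model_bpa(
    dataset="Sales Model",
    workspace="Sales",
    return_dataframe=True,
)
print(df.head())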
sempy_labs/_model_bpa_bulk.py
CHANGED

@@ -1,7 +1,7 @@
 import sempy.fabric as fabric
 import pandas as pd
 import datetime
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     save_as_delta_table,
     resolve_workspace_capacity,
     retry,
@@ -9,11 +9,11 @@ from sempy_labs._helper_functions import (
     resolve_workspace_id,
     resolve_lakehouse_name_and_id,
 )
-from sempy_labs.lakehouse import (
+from .lakehouse import (
     get_lakehouse_tables,
     lakehouse_attached,
 )
-from sempy_labs._model_bpa import run_model_bpa
+from ._model_bpa import run_model_bpa
 from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons