semantic-link-labs 0.9.3__py3-none-any.whl → 0.9.5__py3-none-any.whl
This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between versions as they appear in their public registries.
Potentially problematic release. This version of semantic-link-labs might be problematic.
- {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/METADATA +25 -6
- {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/RECORD +68 -52
- {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +45 -4
- sempy_labs/_capacities.py +22 -127
- sempy_labs/_capacity_migration.py +11 -9
- sempy_labs/_dashboards.py +60 -0
- sempy_labs/_data_pipelines.py +5 -31
- sempy_labs/_dax.py +17 -3
- sempy_labs/_delta_analyzer.py +279 -127
- sempy_labs/_environments.py +20 -48
- sempy_labs/_eventhouses.py +69 -30
- sempy_labs/_eventstreams.py +16 -34
- sempy_labs/_gateways.py +4 -4
- sempy_labs/_generate_semantic_model.py +30 -10
- sempy_labs/_git.py +90 -1
- sempy_labs/_graphQL.py +3 -20
- sempy_labs/_helper_functions.py +201 -44
- sempy_labs/_job_scheduler.py +226 -2
- sempy_labs/_kql_databases.py +19 -34
- sempy_labs/_kql_querysets.py +15 -32
- sempy_labs/_list_functions.py +14 -133
- sempy_labs/_mirrored_databases.py +14 -48
- sempy_labs/_ml_experiments.py +5 -30
- sempy_labs/_ml_models.py +4 -28
- sempy_labs/_model_bpa.py +17 -0
- sempy_labs/_model_bpa_rules.py +12 -2
- sempy_labs/_mounted_data_factories.py +119 -0
- sempy_labs/_notebooks.py +16 -26
- sempy_labs/_semantic_models.py +117 -0
- sempy_labs/_sql.py +78 -10
- sempy_labs/_sqldatabase.py +227 -0
- sempy_labs/_utils.py +42 -0
- sempy_labs/_vertipaq.py +17 -2
- sempy_labs/_warehouses.py +5 -17
- sempy_labs/_workloads.py +23 -9
- sempy_labs/_workspaces.py +13 -5
- sempy_labs/admin/__init__.py +70 -9
- sempy_labs/admin/_activities.py +166 -0
- sempy_labs/admin/_apps.py +143 -0
- sempy_labs/admin/_artifacts.py +62 -0
- sempy_labs/admin/_basic_functions.py +32 -704
- sempy_labs/admin/_capacities.py +311 -0
- sempy_labs/admin/_datasets.py +184 -0
- sempy_labs/admin/_domains.py +1 -1
- sempy_labs/admin/_items.py +3 -1
- sempy_labs/admin/_reports.py +239 -0
- sempy_labs/admin/_scanner.py +0 -1
- sempy_labs/admin/_shared.py +76 -0
- sempy_labs/admin/_tenant.py +489 -0
- sempy_labs/admin/_users.py +133 -0
- sempy_labs/admin/_workspaces.py +148 -0
- sempy_labs/directlake/_dl_helper.py +0 -1
- sempy_labs/directlake/_update_directlake_partition_entity.py +14 -0
- sempy_labs/graph/_teams.py +1 -1
- sempy_labs/graph/_users.py +9 -1
- sempy_labs/lakehouse/__init__.py +2 -0
- sempy_labs/lakehouse/_lakehouse.py +6 -7
- sempy_labs/lakehouse/_shortcuts.py +216 -64
- sempy_labs/report/__init__.py +3 -1
- sempy_labs/report/_download_report.py +4 -1
- sempy_labs/report/_export_report.py +272 -0
- sempy_labs/report/_generate_report.py +9 -17
- sempy_labs/report/_report_bpa.py +12 -19
- sempy_labs/report/_report_functions.py +9 -261
- sempy_labs/tom/_model.py +307 -40
- {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/top_level.txt +0 -0
sempy_labs/_capacity_migration.py
CHANGED

@@ -2,10 +2,11 @@ import sempy.fabric as fabric
 from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons
-from sempy.fabric.exceptions import FabricHTTPException
 from sempy_labs._workspaces import assign_workspace_to_capacity
-from sempy_labs.admin
+from sempy_labs.admin import (
     assign_workspaces_to_capacity,
+)
+from sempy_labs.admin._capacities import (
     _list_capacities_meta,
     list_capacities,
 )
@@ -15,6 +16,7 @@ from sempy_labs._helper_functions import (
     _base_api,
 )
 from sempy_labs._capacities import create_fabric_capacity
+from uuid import UUID


 def _migrate_settings(source_capacity: str, target_capacity: str):
@@ -530,7 +532,7 @@ def _migrate_delegated_tenant_settings(source_capacity: str, target_capacity: st


 @log
-def _migrate_spark_settings(source_capacity: str, target_capacity: str):
+def _migrate_spark_settings(source_capacity: str | UUID, target_capacity: str | UUID):
     """
     This function migrates a capacity's spark settings to another capacity.

@@ -538,14 +540,14 @@ def _migrate_spark_settings(source_capacity: str, target_capacity: str):

     Parameters
     ----------
-    source_capacity : str
-        Name of the source capacity.
-    target_capacity : str
-        Name of the target capacity.
+    source_capacity : str | uuid.UUID
+        Name or ID of the source capacity.
+    target_capacity : str | uuid.UUID
+        Name or ID of the target capacity.
     """

-    source_capacity_id = resolve_capacity_id(
-    target_capacity_id = resolve_capacity_id(
+    source_capacity_id = resolve_capacity_id(capacity=source_capacity)
+    target_capacity_id = resolve_capacity_id(capacity=target_capacity)

     # Get source capacity server dns
     response = _base_api(request=f"metadata/capacityInformation/{source_capacity_id}")
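The signature change in _migrate_spark_settings widens both parameters from str to str | UUID, with resolution delegated to resolve_capacity_id. The usual shape of such a resolver is: if the input already parses as a GUID, use it; otherwise treat it as a display name and look the ID up. A minimal sketch of that pattern, with a hypothetical name_to_id dict standing in for whatever lookup the real helper performs:

from uuid import UUID


def resolve_capacity_id_sketch(capacity: str | UUID, name_to_id: dict[str, str]) -> str:
    """Resolve a capacity name or GUID to a GUID string.

    name_to_id is a hypothetical lookup table standing in for the REST call
    the real resolve_capacity_id makes; it is not part of semantic-link-labs.
    """
    try:
        # Anything that parses as a UUID is already an ID; normalize it.
        return str(UUID(str(capacity)))
    except ValueError:
        pass
    # Otherwise treat the input as a display name and look the ID up.
    try:
        return name_to_id[str(capacity)]
    except KeyError:
        raise ValueError(f"Capacity '{capacity}' not found") from None


# Both call styles resolve to the same ID:
lookup = {"MyCapacity": "0e9d53a2-0000-4000-8000-000000000001"}
assert resolve_capacity_id_sketch("MyCapacity", lookup) == resolve_capacity_id_sketch(
    UUID("0e9d53a2-0000-4000-8000-000000000001"), lookup
)

Accepting either form lets callers pass whatever identifier they have on hand, without a separate resolution step.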
sempy_labs/_dashboards.py
ADDED

@@ -0,0 +1,60 @@
+from typing import Optional
+from uuid import UUID
+import pandas as pd
+from sempy_labs._helper_functions import (
+    _create_dataframe,
+    _base_api,
+    resolve_workspace_name_and_id,
+    _update_dataframe_datatypes,
+)
+
+
+def list_dashboards(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Shows a list of the dashboards within a workspace.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the dashboards within a workspace.
+    """
+
+    columns = {
+        "Dashboard ID": "string",
+        "Dashboard Name": "string",
+        "Read Only": "bool",
+        "Web URL": "string",
+        "Embed URL": "string",
+        "Data Classification": "string",
+        "Users": "string",
+        "Subscriptions": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    response = _base_api(request=f"/v1.0/myorg/groups/{workspace_id}/dashboards")
+
+    for v in response.json().get("value", []):
+        new_data = {
+            "Dashboard ID": v.get("id"),
+            "Dashboard Name": v.get("displayName"),
+            "Read Only": v.get("isReadOnly"),
+            "Web URL": v.get("webUrl"),
+            "Embed URL": v.get("embedUrl"),
+            "Data Classification": v.get("dataClassification"),
+            "Users": v.get("users"),
+            "Subscriptions": v.get("subscriptions"),
+        }
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
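list_dashboards follows the accumulation pattern that recurs throughout this release: seed an empty DataFrame typed from a column map, append one row per REST result, then re-apply the declared dtypes. A runnable sketch of that pattern against a fabricated payload — the real function gets its payload via _base_api, and the closing astype call is only an assumption about what _update_dataframe_datatypes amounts to:

import pandas as pd

# Fabricated response body shaped like the dashboards endpoint's JSON.
payload = {
    "value": [
        {"id": "a1b2", "displayName": "Sales", "isReadOnly": False},
        {"id": "c3d4", "displayName": "Ops", "isReadOnly": True},
    ]
}

columns = {"Dashboard ID": "string", "Dashboard Name": "string", "Read Only": "bool"}

# Seed an empty frame with the declared dtypes (what _create_dataframe appears to do).
df = pd.DataFrame({name: pd.Series(dtype=dtype) for name, dtype in columns.items()})

for v in payload["value"]:
    new_data = {
        "Dashboard ID": v.get("id"),
        "Dashboard Name": v.get("displayName"),
        "Read Only": v.get("isReadOnly"),
    }
    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

# Re-apply the declared dtypes, since concat can widen them to object.
df = df.astype(columns)
print(df.dtypes)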
sempy_labs/_data_pipelines.py
CHANGED
@@ -1,13 +1,13 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _decode_b64,
     _base_api,
-    _print_success,
     resolve_item_id,
     _create_dataframe,
+    delete_item,
+    create_item,
 )
 from uuid import UUID

@@ -76,25 +76,8 @@ def create_data_pipeline(
     or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (
-
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/dataPipelines",
-        method="post",
-        payload=payload,
-        status_codes=[201, 202],
-        lro_return_status_code=True,
-    )
-    _print_success(
-        item_name=name,
-        item_type="data pipeline",
-        workspace_name=workspace_name,
-        action="created",
+    create_item(
+        name=name, description=description, type="DataPipeline", workspace=workspace
     )


@@ -114,16 +97,7 @@ def delete_data_pipeline(name: str | UUID, workspace: Optional[str | UUID] = Non
     or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (
-    item_id = resolve_item_id(item=name, type="DataPipeline", workspace=workspace)
-
-    fabric.delete_item(item_id=item_id, workspace=workspace)
-    _print_success(
-        item_name=name,
-        item_type="data pipeline",
-        workspace_name=workspace_name,
-        action="deleted",
-    )
+    delete_item(item=name, type="DataPipeline", workspace=workspace)


 def get_data_pipeline_definition(
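Both create_data_pipeline and delete_data_pipeline collapse to one-liners by delegating to the new generic create_item and delete_item helpers in _helper_functions, whose keyword arguments (name, description, type, workspace) are visible in the diff above. A sketch of what such a consolidation looks like; only those keyword arguments are taken from the diff, and the body below (endpoint mapping, printed request) is invented for illustration, not the library's actual implementation:

from typing import Optional


def create_item_sketch(
    name: str,
    type: str,
    workspace: Optional[str] = None,
    description: Optional[str] = None,
) -> None:
    """Illustrative stand-in for the generic create_item helper.

    Only the keyword arguments mirror the real helper; everything else here
    is invented to show the shape of the consolidation.
    """
    payload = {"displayName": name}
    if description:
        payload["description"] = description
    # Hypothetical type-to-endpoint mapping; the real helper's routing may differ.
    endpoint = {"DataPipeline": "dataPipelines"}.get(type, type.lower() + "s")
    print(f"POST /v1/workspaces/{workspace or '<resolved>'}/{endpoint}", payload)


# One generic helper replaces a near-identical block in every item module:
create_item_sketch(name="My Pipeline", type="DataPipeline", workspace="Dev")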
sempy_labs/_dax.py
CHANGED
@@ -62,9 +62,23 @@ def evaluate_dax_impersonation(
         payload=payload,
     )
     data = response.json()["results"][0]["tables"]
-
-
-
+
+    # Get all possible column names from all rows because null columns aren't returned
+    all_columns = set()
+    for item in data:
+        for row in item["rows"]:
+            all_columns.update(row.keys())
+
+    # Create rows with all columns, filling missing values with None
+    rows = []
+    for item in data:
+        for row in item["rows"]:
+            # Create a new row with all columns, defaulting to None
+            new_row = {col: row.get(col) for col in all_columns}
+            rows.append(new_row)
+
+    # Create DataFrame from the processed rows
+    df = pd.DataFrame(rows)

     return df

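The rewritten tail of evaluate_dax_impersonation works around a quirk of DAX query results over REST: a column whose value is null for a given row is omitted from that row's record, so reading column names from any single row can silently drop columns. Collecting the union of keys across all rows first keeps the frame's shape stable. A self-contained illustration with a fabricated result set:

import pandas as pd

# Fabricated result set: a column whose value is null for a row is simply
# absent from that row's record.
data = [
    {"rows": [{"[Country]": "US", "[Sales]": 100}, {"[Country]": "DE"}]},
]

# First pass: union of keys across all rows, so no column is dropped.
all_columns = set()
for item in data:
    for row in item["rows"]:
        all_columns.update(row.keys())

# Second pass: rebuild each row with every column, defaulting to None.
rows = [
    {col: row.get(col) for col in all_columns}
    for item in data
    for row in item["rows"]
]

df = pd.DataFrame(rows)
print(df)  # "[Sales]" exists for both rows; the omitted value shows as NaN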