semantic-link-labs 0.7.4__py3-none-any.whl → 0.8.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/METADATA +43 -7
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/RECORD +59 -40
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +116 -58
- sempy_labs/_ai.py +0 -2
- sempy_labs/_capacities.py +39 -3
- sempy_labs/_capacity_migration.py +623 -0
- sempy_labs/_clear_cache.py +8 -8
- sempy_labs/_connections.py +15 -13
- sempy_labs/_data_pipelines.py +118 -0
- sempy_labs/_documentation.py +144 -0
- sempy_labs/_eventhouses.py +118 -0
- sempy_labs/_eventstreams.py +118 -0
- sempy_labs/_generate_semantic_model.py +3 -3
- sempy_labs/_git.py +23 -24
- sempy_labs/_helper_functions.py +140 -47
- sempy_labs/_icons.py +40 -0
- sempy_labs/_kql_databases.py +134 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_list_functions.py +218 -421
- sempy_labs/_mirrored_warehouses.py +50 -0
- sempy_labs/_ml_experiments.py +122 -0
- sempy_labs/_ml_models.py +120 -0
- sempy_labs/_model_auto_build.py +0 -4
- sempy_labs/_model_bpa.py +10 -12
- sempy_labs/_model_bpa_bulk.py +8 -7
- sempy_labs/_model_dependencies.py +26 -18
- sempy_labs/_notebooks.py +5 -16
- sempy_labs/_query_scale_out.py +6 -5
- sempy_labs/_refresh_semantic_model.py +7 -19
- sempy_labs/_spark.py +40 -45
- sempy_labs/_sql.py +60 -15
- sempy_labs/_vertipaq.py +25 -25
- sempy_labs/_warehouses.py +132 -0
- sempy_labs/_workspaces.py +0 -3
- sempy_labs/admin/__init__.py +53 -0
- sempy_labs/admin/_basic_functions.py +888 -0
- sempy_labs/admin/_domains.py +411 -0
- sempy_labs/directlake/_directlake_schema_sync.py +1 -1
- sempy_labs/directlake/_dl_helper.py +32 -16
- sempy_labs/directlake/_generate_shared_expression.py +11 -14
- sempy_labs/directlake/_guardrails.py +7 -7
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +14 -24
- sempy_labs/directlake/_update_directlake_partition_entity.py +1 -1
- sempy_labs/directlake/_warm_cache.py +1 -1
- sempy_labs/lakehouse/_get_lakehouse_tables.py +3 -3
- sempy_labs/lakehouse/_lakehouse.py +3 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +5 -0
- sempy_labs/report/__init__.py +9 -6
- sempy_labs/report/_generate_report.py +1 -1
- sempy_labs/report/_report_bpa.py +369 -0
- sempy_labs/report/_report_bpa_rules.py +113 -0
- sempy_labs/report/_report_helper.py +254 -0
- sempy_labs/report/_report_list_functions.py +95 -0
- sempy_labs/report/_report_rebind.py +0 -4
- sempy_labs/report/_reportwrapper.py +2037 -0
- sempy_labs/tom/_model.py +333 -22
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/top_level.txt +0 -0
sempy_labs/_connections.py
CHANGED
@@ -2,7 +2,6 @@ import sempy.fabric as fabric
 import pandas as pd
 from sempy.fabric.exceptions import FabricHTTPException
 from typing import Optional
-import sempy_labs._icons as icons
 from sempy_labs._helper_functions import pagination
 
 
@@ -77,8 +76,9 @@ def list_connections() -> pd.DataFrame:
     return df
 
 
-def list_item_connections(item_name: str, item_type: str, workspace: Optional[str] = None) -> pd.DataFrame:
-
+def list_item_connections(
+    item_name: str, item_type: str, workspace: Optional[str] = None
+) -> pd.DataFrame:
     """
     Shows the list of connections that the specified item is connected to.
 
@@ -104,7 +104,9 @@ def list_item_connections(item_name: str, item_type: str, workspace: Optional[str] = None) -> pd.DataFrame:
     workspace = fabric.resolve_workspace_name(workspace)
     workspace_id = fabric.resolve_workspace_id(workspace)
     item_type = item_type[0].upper() + item_type[1:]
-    item_id = fabric.resolve_item_id(item_name=item_name, type=item_type, workspace=workspace)
+    item_id = fabric.resolve_item_id(
+        item_name=item_name, type=item_type, workspace=workspace
+    )
 
     client = fabric.FabricRestClient()
     response = client.post(f"/v1/workspaces/{workspace_id}/items/{item_id}/connections")
@@ -123,17 +125,17 @@ def list_item_connections(item_name: str, item_type: str, workspace: Optional[str] = None) -> pd.DataFrame:
     if response.status_code != 200:
         raise FabricHTTPException(response)
 
-
+    responses = pagination(client, response)
 
-    for r in
-        for v in r.get(
+    for r in responses:
+        for v in r.get("value", []):
             new_data = {
-                "Connection Name": v.get(
-                "Connection Id": v.get(
-                "Connectivity Type": v.get(
-                "Connection Type": v.get(
-                "Connection Path": v.get(
-                "Gateway Id": v.get(
+                "Connection Name": v.get("displayName"),
+                "Connection Id": v.get("id"),
+                "Connectivity Type": v.get("connectivityType"),
+                "Connection Type": v.get("connectionDetails", {}).get("type"),
+                "Connection Path": v.get("connectionDetails", {}).get("path"),
+                "Gateway Id": v.get("gatewayId"),
             }
 
             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
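For context, a minimal usage sketch of the reworked list_item_connections. The workspace and item names below are placeholders, and the top-level import assumes the function is re-exported from the package root (this release also reworks sempy_labs/__init__.py):

    import sempy_labs as labs

    # Placeholder names; any item type that supports connections should work.
    df = labs.list_item_connections(
        item_name="Sales", item_type="SemanticModel", workspace="My Workspace"
    )
    print(df[["Connection Name", "Connectivity Type", "Gateway Id"]])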
sempy_labs/_data_pipelines.py
ADDED
@@ -0,0 +1,118 @@
+import sempy.fabric as fabric
+import pandas as pd
+import sempy_labs._icons as icons
+from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    lro,
+    pagination,
+)
+from sempy.fabric.exceptions import FabricHTTPException
+
+
+def list_data_pipelines(workspace: Optional[str] = None) -> pd.DataFrame:
+    """
+    Shows the data pipelines within a workspace.
+
+    Parameters
+    ----------
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the data pipelines within a workspace.
+    """
+
+    df = pd.DataFrame(columns=["Data Pipeline Name", "Data Pipeline ID", "Description"])
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    client = fabric.FabricRestClient()
+    response = client.get(f"/v1/workspaces/{workspace_id}/dataPipelines")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    responses = pagination(client, response)
+
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "Data Pipeline Name": v.get("displayName"),
+                "Data Pipeline ID": v.get("id"),
+                "Description": v.get("description"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def create_data_pipeline(
+    name: str, description: Optional[str] = None, workspace: Optional[str] = None
+):
+    """
+    Creates a Fabric data pipeline.
+
+    Parameters
+    ----------
+    name: str
+        Name of the data pipeline.
+    description : str, default=None
+        A description of the data pipeline.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    request_body = {"displayName": name}
+
+    if description:
+        request_body["description"] = description
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/dataPipelines", json=request_body
+    )
+
+    lro(client, response, status_codes=[201, 202])
+
+    print(
+        f"{icons.green_dot} The '{name}' data pipeline has been created within the '{workspace}' workspace."
+    )
+
+
+def delete_data_pipeline(name: str, workspace: Optional[str] = None):
+    """
+    Deletes a Fabric data pipeline.
+
+    Parameters
+    ----------
+    name: str
+        Name of the data pipeline.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    item_id = fabric.resolve_item_id(
+        item_name=name, type="DataPipeline", workspace=workspace
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.delete(f"/v1/workspaces/{workspace_id}/dataPipelines/{item_id}")
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    print(
+        f"{icons.green_dot} The '{name}' data pipeline within the '{workspace}' workspace has been deleted."
+    )
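A hedged sketch of the new data pipeline lifecycle; all names are placeholders, and the top-level imports assume these functions are exported via the expanded sempy_labs/__init__.py:

    from sempy_labs import (
        create_data_pipeline,
        list_data_pipelines,
        delete_data_pipeline,
    )

    # Placeholder names throughout.
    create_data_pipeline(
        name="Nightly Load", description="Loads staging tables", workspace="Dev"
    )
    print(list_data_pipelines(workspace="Dev"))  # should now include "Nightly Load"
    delete_data_pipeline(name="Nightly Load", workspace="Dev")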
sempy_labs/_documentation.py
ADDED
@@ -0,0 +1,144 @@
+import sempy
+import sempy.fabric as fabric
+import pandas as pd
+from typing import List, Optional
+
+
+def list_all_items(workspaces: Optional[str | List[str]] = None):
+
+    df = pd.DataFrame(
+        columns=[
+            "Workspace Name",
+            "Workspace Id",
+            "Item Name",
+            "Item Type",
+            "Description",
+        ]
+    )
+
+    if isinstance(workspaces, str):
+        workspaces = [workspaces]
+
+    dfW = fabric.list_workspaces()
+    if workspaces is not None:
+        dfW = dfW[dfW["Name"].isin(workspaces)]
+
+    for _, r in dfW.iterrows():
+        workspace_name = r["Name"]
+        workspace_id = r["Id"]
+        dfI = fabric.list_items(workspace=workspace_name)
+        for _, r2 in dfI.iterrows():
+
+            new_data = {
+                "Workspace Name": workspace_name,
+                "Workspace Id": workspace_id,
+                "Item Name": r2["Name"],
+                "Item Type": r2["Type"],
+                "Description": r2["Description"],
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def data_dictionary(dataset: str, workspace: Optional[str | None] = None):
+
+    from sempy_labs.tom import connect_semantic_model
+
+    sempy.fabric._client._utils._init_analysis_services()
+    import Microsoft.AnalysisServices.Tabular as TOM
+
+    df = pd.DataFrame(
+        columns=[
+            "Workspace Name",
+            "Model Name",
+            "Table Name",
+            "Object Type",
+            "Object Name",
+            "Hidden Flag",
+            "Description",
+            "Display Folder",
+            "Measure Formula",
+        ]
+    )
+
+    with connect_semantic_model(
+        dataset=dataset, readonly=True, workspace=workspace
+    ) as tom:
+        for t in tom.model.Tables:
+            expr = None
+            if tom.is_calculated_table(table_name=t.Name):
+                pName = next(p.Name for p in t.Partitions)
+                expr = t.Partitions[pName].Source.Expression
+
+            new_data = {
+                "Workspace Name": workspace,
+                "Model Name": dataset,
+                "Table Name": t.Name,
+                "Object Type": t.ObjectType,
+                "Object Name": t.Name,
+                "Hidden Flag": t.IsHidden,
+                "Description": t.Description,
+                "Display Folder": None,
+                "Measure Formula": expr,
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+            cols = [c for c in t.Columns if c.Type != TOM.ColumnType.RowNumber]
+            for c in cols:
+
+                def get_calc_column_expression(table_name, column_name):
+                    expr = None
+                    if tom.is_calculated_column(
+                        table_name=table_name, column_name=column_name
+                    ):
+                        expr = c.Expression
+                    return expr
+
+                new_data = {
+                    "Workspace Name": workspace,
+                    "Model Name": dataset,
+                    "Table Name": t.Name,
+                    "Object Type": c.ObjectType,
+                    "Object Name": c.Name,
+                    "Hidden Flag": c.IsHidden,
+                    "Description": c.Description,
+                    "Display Folder": c.DisplayFolder,
+                    "Measure Formula": get_calc_column_expression(t.Name, c.Name),
+                }
+                df = pd.concat(
+                    [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                )
+            for m in t.Measures:
+                new_data = {
+                    "Workspace Name": workspace,
+                    "Model Name": dataset,
+                    "Table Name": t.Name,
+                    "Object Type": m.ObjectType,
+                    "Object Name": m.Name,
+                    "Hidden Flag": m.IsHidden,
+                    "Description": m.Description,
+                    "Display Folder": m.DisplayFolder,
+                    "Measure Formula": m.Expression,
+                }
+                df = pd.concat(
+                    [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                )
+
+            if t.CalculationGroup is not None:
+                for ci in t.CalculationGroup.CalculationItems:
+                    new_data = {
+                        "Workspace Name": workspace,
+                        "Model Name": dataset,
+                        "Table Name": t.Name,
+                        "Object Type": "Calculation Item",
+                        "Object Name": ci.Name,
+                        "Hidden Flag": t.IsHidden,
+                        "Description": ci.Description,
+                        "Display Folder": None,
+                        "Measure Formula": ci.Expression,
+                    }
+                    df = pd.concat(
+                        [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                    )
+
+    return df
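A usage sketch of the new documentation helpers, assuming direct import from the private module (they may also be re-exported from the package root); dataset and workspace names are placeholders:

    from sempy_labs._documentation import data_dictionary, list_all_items

    # Placeholder dataset/workspace names.
    dd = data_dictionary(dataset="AdventureWorks", workspace="Analytics")
    print(dd[["Table Name", "Object Type", "Object Name", "Measure Formula"]].head())

    # Restrict the inventory to specific workspaces, or pass None for all of them.
    items = list_all_items(workspaces=["Analytics", "Finance"])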
sempy_labs/_eventhouses.py
ADDED
@@ -0,0 +1,118 @@
+import sempy.fabric as fabric
+import pandas as pd
+import sempy_labs._icons as icons
+from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    lro,
+    pagination,
+)
+from sempy.fabric.exceptions import FabricHTTPException
+
+
+def create_eventhouse(
+    name: str, description: Optional[str] = None, workspace: Optional[str] = None
+):
+    """
+    Creates a Fabric eventhouse.
+
+    Parameters
+    ----------
+    name: str
+        Name of the eventhouse.
+    description : str, default=None
+        A description of the eventhouse.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    request_body = {"displayName": name}
+
+    if description:
+        request_body["description"] = description
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/eventhouses", json=request_body
+    )
+
+    lro(client, response, status_codes=[201, 202])
+
+    print(
+        f"{icons.green_dot} The '{name}' eventhouse has been created within the '{workspace}' workspace."
+    )
+
+
+def list_eventhouses(workspace: Optional[str] = None) -> pd.DataFrame:
+    """
+    Shows the eventhouses within a workspace.
+
+    Parameters
+    ----------
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the eventhouses within a workspace.
+    """
+
+    df = pd.DataFrame(columns=["Eventhouse Name", "Eventhouse Id", "Description"])
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    client = fabric.FabricRestClient()
+    response = client.get(f"/v1/workspaces/{workspace_id}/eventhouses")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    responses = pagination(client, response)
+
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "Eventhouse Name": v.get("displayName"),
+                "Eventhouse Id": v.get("id"),
+                "Description": v.get("description"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def delete_eventhouse(name: str, workspace: Optional[str] = None):
+    """
+    Deletes a Fabric eventhouse.
+
+    Parameters
+    ----------
+    name: str
+        Name of the eventhouse.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    item_id = fabric.resolve_item_id(
+        item_name=name, type="Eventhouse", workspace=workspace
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.delete(f"/v1/workspaces/{workspace_id}/eventhouses/{item_id}")
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    print(
+        f"{icons.green_dot} The '{name}' eventhouse within the '{workspace}' workspace has been deleted."
+    )
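A minimal sketch of the eventhouse lifecycle; names are placeholders, and the imports assume top-level re-exports. Note that create_eventhouse routes the response through lro() because the REST API may answer 202 (accepted, long-running) instead of 201 (created):

    from sempy_labs import create_eventhouse, list_eventhouses, delete_eventhouse

    # Placeholder names.
    create_eventhouse(name="Telemetry", description="Raw device events", workspace="Dev")
    print(list_eventhouses(workspace="Dev"))
    delete_eventhouse(name="Telemetry", workspace="Dev")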
sempy_labs/_eventstreams.py
ADDED
@@ -0,0 +1,118 @@
+import sempy.fabric as fabric
+import pandas as pd
+import sempy_labs._icons as icons
+from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    lro,
+    pagination,
+)
+from sempy.fabric.exceptions import FabricHTTPException
+
+
+def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
+    """
+    Shows the eventstreams within a workspace.
+
+    Parameters
+    ----------
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the eventstreams within a workspace.
+    """
+
+    df = pd.DataFrame(columns=["Eventstream Name", "Eventstream Id", "Description"])
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    client = fabric.FabricRestClient()
+    response = client.get(f"/v1/workspaces/{workspace_id}/eventstreams")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    responses = pagination(client, response)
+
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "Eventstream Name": v.get("displayName"),
+                "Eventstream Id": v.get("id"),
+                "Description": v.get("description"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def create_eventstream(
+    name: str, description: Optional[str] = None, workspace: Optional[str] = None
+):
+    """
+    Creates a Fabric eventstream.
+
+    Parameters
+    ----------
+    name: str
+        Name of the eventstream.
+    description : str, default=None
+        A description of the eventstream.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    request_body = {"displayName": name}
+
+    if description:
+        request_body["description"] = description
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/eventstreams", json=request_body
+    )
+
+    lro(client, response, status_codes=[201, 202])
+
+    print(
+        f"{icons.green_dot} The '{name}' eventstream has been created within the '{workspace}' workspace."
+    )
+
+
+def delete_eventstream(name: str, workspace: Optional[str] = None):
+    """
+    Deletes a Fabric eventstream.
+
+    Parameters
+    ----------
+    name: str
+        Name of the eventstream.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    item_id = fabric.resolve_item_id(
+        item_name=name, type="Eventstream", workspace=workspace
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.delete(f"/v1/workspaces/{workspace_id}/eventstreams/{item_id}")
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    print(
+        f"{icons.green_dot} The '{name}' eventstream within the '{workspace}' workspace has been deleted."
+    )
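The eventstream module mirrors the eventhouse module one-for-one (list/create/delete over the same REST pattern); a sketch with placeholder names, again assuming top-level re-exports:

    from sempy_labs import create_eventstream, list_eventstreams, delete_eventstream

    create_eventstream(name="ClickStream", workspace="Dev")  # description is optional
    print(list_eventstreams(workspace="Dev"))
    delete_eventstream(name="ClickStream", workspace="Dev")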
sempy_labs/_generate_semantic_model.py
CHANGED
@@ -20,7 +20,7 @@ def create_blank_semantic_model(
     dataset: str,
     compatibility_level: int = 1605,
     workspace: Optional[str] = None,
-    overwrite:
+    overwrite: bool = True,
 ):
     """
     Creates a new blank semantic model (no tables/columns etc.).
@@ -212,8 +212,8 @@ def deploy_semantic_model(
     source_workspace: Optional[str] = None,
     target_dataset: Optional[str] = None,
     target_workspace: Optional[str] = None,
-    refresh_target_dataset:
-    overwrite:
+    refresh_target_dataset: bool = True,
+    overwrite: bool = False,
 ):
     """
     Deploys a semantic model based on an existing semantic model.
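The restored boolean defaults matter for callers: deploy_semantic_model now refreshes the target by default but refuses to overwrite an existing model unless told to. A hedged sketch with placeholder names (the source_dataset parameter name is assumed from context, as it is not shown in this hunk):

    from sempy_labs import deploy_semantic_model

    deploy_semantic_model(
        source_dataset="Sales Model",  # assumed parameter name
        source_workspace="Dev",
        target_dataset="Sales Model",
        target_workspace="Prod",
        overwrite=True,  # default is now False, guarding against accidental replacement
    )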
sempy_labs/_git.py
CHANGED
@@ -132,19 +132,18 @@ def get_git_status(workspace: Optional[str] = None) -> pd.DataFrame:
     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/git/status")
 
-    if response not in [200, 202]:
+    if response.status_code not in [200, 202]:
         raise FabricHTTPException(response)
 
     result = lro(client, response).json()
 
-    for
-        changes = v.get("changes", [])
+    for changes in result.get("changes", []):
         item_metadata = changes.get("itemMetadata", {})
         item_identifier = item_metadata.get("itemIdentifier", {})
 
         new_data = {
-            "Workspace Head":
-            "Remote Commit Hash":
+            "Workspace Head": result.get("workspaceHead"),
+            "Remote Commit Hash": result.get("remoteCommitHash"),
             "Object ID": item_identifier.get("objectId"),
             "Logical ID": item_identifier.get("logicalId"),
             "Item Type": item_metadata.get("itemType"),
@@ -199,21 +198,21 @@ def get_git_connection(workspace: Optional[str] = None) -> pd.DataFrame:
     if response.status_code != 200:
         raise FabricHTTPException(response)
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    r = response.json()
+    provider_details = r.get("gitProviderDetails", {})
+    sync_details = r.get("gitSyncDetails", {})
+    new_data = {
+        "Organization Name": provider_details.get("organizationName"),
+        "Project Name": provider_details.get("projectName"),
+        "Git Provider Type": provider_details.get("gitProviderType"),
+        "Repository Name": provider_details.get("repositoryName"),
+        "Branch Name": provider_details.get("branchName"),
+        "Directory Name": provider_details.get("directoryName"),
+        "Workspace Head": sync_details.get("head"),
+        "Last Sync Time": sync_details.get("lastSyncTime"),
+        "Git Connection State": r.get("gitConnectionState"),
+    }
+    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
     return df
 
@@ -237,7 +236,7 @@ def initialize_git_connection(workspace: Optional[str] = None):
     client = fabric.FabricRestClient()
     response = client.post(f"/v1/workspaces/{workspace_id}/git/initializeConnection")
 
-    if response not in [200, 202]:
+    if response.status_code not in [200, 202]:
         raise FabricHTTPException(response)
 
     lro(client, response)
@@ -315,7 +314,7 @@ def update_from_git(
     remote_commit_hash: str,
     conflict_resolution_policy: str,
     workspace_head: Optional[str] = None,
-    allow_override:
+    allow_override: bool = False,
     workspace: Optional[str] = None,
 ):
     """
@@ -342,9 +341,9 @@ def update_from_git(
     workspace, workspace_id = resolve_workspace_name_and_id(workspace)
 
     conflict_resolution_policies = ["PreferWorkspace", "PreferRemote"]
-    if "remote" in
+    if "remote" in [policy.lower() for policy in conflict_resolution_policies]:
        conflict_resolution_policies = "PreferRemote"
-    elif "workspace" in
+    elif "workspace" in [policy.lower() for policy in conflict_resolution_policies]:
        conflict_resolution_policies = "PreferWorkspace"
 
     if conflict_resolution_policy not in conflict_resolution_policies: