semantic-link-labs 0.11.1__py3-none-any.whl → 0.11.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.2.dist-info}/METADATA +5 -4
- semantic_link_labs-0.11.2.dist-info/RECORD +210 -0
- sempy_labs/__init__.py +56 -56
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_ai.py +1 -1
- sempy_labs/_capacities.py +2 -2
- sempy_labs/_capacity_migration.py +5 -5
- sempy_labs/_clear_cache.py +1 -1
- sempy_labs/_connections.py +2 -2
- sempy_labs/_dashboards.py +16 -16
- sempy_labs/_data_pipelines.py +1 -1
- sempy_labs/_dataflows.py +101 -26
- sempy_labs/_dax.py +3 -3
- sempy_labs/_dax_query_view.py +1 -1
- sempy_labs/_delta_analyzer.py +4 -4
- sempy_labs/_delta_analyzer_history.py +1 -1
- sempy_labs/_deployment_pipelines.py +1 -1
- sempy_labs/_environments.py +22 -21
- sempy_labs/_eventhouses.py +12 -11
- sempy_labs/_eventstreams.py +12 -11
- sempy_labs/_external_data_shares.py +23 -22
- sempy_labs/_gateways.py +47 -45
- sempy_labs/_generate_semantic_model.py +3 -3
- sempy_labs/_git.py +1 -1
- sempy_labs/_graphQL.py +12 -11
- sempy_labs/_job_scheduler.py +56 -54
- sempy_labs/_kql_databases.py +16 -17
- sempy_labs/_kql_querysets.py +12 -11
- sempy_labs/_kusto.py +2 -2
- sempy_labs/_list_functions.py +1 -1
- sempy_labs/_managed_private_endpoints.py +18 -15
- sempy_labs/_mirrored_databases.py +16 -15
- sempy_labs/_mirrored_warehouses.py +12 -11
- sempy_labs/_ml_experiments.py +11 -10
- sempy_labs/_ml_models.py +11 -10
- sempy_labs/_model_auto_build.py +3 -3
- sempy_labs/_model_bpa.py +5 -5
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +1 -1
- sempy_labs/_mounted_data_factories.py +12 -12
- sempy_labs/_notebooks.py +1 -1
- sempy_labs/_one_lake_integration.py +1 -1
- sempy_labs/_query_scale_out.py +1 -1
- sempy_labs/_refresh_semantic_model.py +1 -1
- sempy_labs/_semantic_models.py +30 -28
- sempy_labs/_spark.py +1 -1
- sempy_labs/_sql.py +1 -1
- sempy_labs/_sql_endpoints.py +12 -11
- sempy_labs/_sqldatabase.py +15 -15
- sempy_labs/_tags.py +11 -10
- sempy_labs/_translations.py +1 -1
- sempy_labs/_user_delegation_key.py +2 -2
- sempy_labs/_variable_libraries.py +13 -12
- sempy_labs/_vertipaq.py +3 -3
- sempy_labs/_vpax.py +1 -1
- sempy_labs/_warehouses.py +15 -14
- sempy_labs/_workloads.py +1 -1
- sempy_labs/_workspace_identity.py +1 -1
- sempy_labs/_workspaces.py +14 -13
- sempy_labs/admin/__init__.py +18 -18
- sempy_labs/admin/_activities.py +46 -46
- sempy_labs/admin/_apps.py +28 -26
- sempy_labs/admin/_artifacts.py +15 -15
- sempy_labs/admin/_basic_functions.py +1 -2
- sempy_labs/admin/_capacities.py +84 -82
- sempy_labs/admin/_dataflows.py +2 -2
- sempy_labs/admin/_datasets.py +50 -48
- sempy_labs/admin/_domains.py +25 -19
- sempy_labs/admin/_external_data_share.py +24 -22
- sempy_labs/admin/_git.py +17 -17
- sempy_labs/admin/_items.py +47 -45
- sempy_labs/admin/_reports.py +61 -58
- sempy_labs/admin/_scanner.py +2 -2
- sempy_labs/admin/_shared.py +18 -18
- sempy_labs/admin/_tags.py +2 -2
- sempy_labs/admin/_tenant.py +57 -51
- sempy_labs/admin/_users.py +16 -15
- sempy_labs/admin/_workspaces.py +2 -2
- sempy_labs/directlake/__init__.py +12 -12
- sempy_labs/directlake/_directlake_schema_compare.py +3 -3
- sempy_labs/directlake/_directlake_schema_sync.py +9 -7
- sempy_labs/directlake/_dl_helper.py +1 -1
- sempy_labs/directlake/_generate_shared_expression.py +1 -1
- sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
- sempy_labs/directlake/_guardrails.py +1 -1
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +3 -3
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -1
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -3
- sempy_labs/directlake/_update_directlake_partition_entity.py +4 -4
- sempy_labs/directlake/_warm_cache.py +3 -3
- sempy_labs/graph/__init__.py +3 -3
- sempy_labs/graph/_groups.py +81 -78
- sempy_labs/graph/_teams.py +21 -21
- sempy_labs/graph/_users.py +111 -10
- sempy_labs/lakehouse/__init__.py +7 -7
- sempy_labs/lakehouse/_blobs.py +30 -30
- sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
- sempy_labs/lakehouse/_get_lakehouse_tables.py +29 -27
- sempy_labs/lakehouse/_helper.py +30 -2
- sempy_labs/lakehouse/_lakehouse.py +2 -2
- sempy_labs/lakehouse/_livy_sessions.py +47 -42
- sempy_labs/lakehouse/_shortcuts.py +22 -21
- sempy_labs/migration/__init__.py +8 -8
- sempy_labs/migration/_create_pqt_file.py +2 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -3
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +3 -4
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -2
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +4 -4
- sempy_labs/migration/_migration_validation.py +1 -2
- sempy_labs/migration/_refresh_calc_tables.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +40 -40
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +1 -1
- sempy_labs/report/__init__.py +10 -10
- sempy_labs/report/_download_report.py +2 -2
- sempy_labs/report/_export_report.py +2 -2
- sempy_labs/report/_generate_report.py +1 -1
- sempy_labs/report/_paginated.py +1 -1
- sempy_labs/report/_report_bpa.py +4 -3
- sempy_labs/report/_report_functions.py +3 -3
- sempy_labs/report/_report_list_functions.py +3 -3
- sempy_labs/report/_report_rebind.py +1 -1
- sempy_labs/report/_reportwrapper.py +247 -249
- sempy_labs/report/_save_report.py +3 -3
- sempy_labs/theme/_org_themes.py +19 -6
- sempy_labs/tom/__init__.py +1 -1
- sempy_labs/tom/_model.py +5 -5
- semantic_link_labs-0.11.1.dist-info/RECORD +0 -210
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.2.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.2.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.2.dist-info}/top_level.txt +0 -0
sempy_labs/_gateways.py
CHANGED
@@ -1,7 +1,7 @@
 from sempy._utils._log import log
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     _is_valid_uuid,
     resolve_capacity_id,
     resolve_workspace_name_and_id,
@@ -47,26 +47,28 @@ def list_gateways() -> pd.DataFrame:
         request="/v1/gateways", client="fabric_sp", uses_pagination=True
     )

-    …
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            …
+            rows.append(
+                {
+                    "Gateway Name": v.get("displayName"),
+                    "Gateway Id": v.get("id"),
+                    "Type": v.get("type"),
+                    "Public Key Exponent": v.get("publicKey", {}).get("exponent"),
+                    "Public Key Modulus": v.get("publicKey", {}).get("modulus"),
+                    "Version": v.get("version"),
+                    "Number Of Member Gateways": v.get("numberOfMemberGateways", 0),
+                    "Load Balancing Setting": v.get("loadBalancingSetting"),
+                    "Allow Cloud Connection Refresh": v.get(
+                        "allowCloudConnectionRefresh"
+                    ),
+                    "Allow Custom Connectors": v.get("allowCustomConnectors"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
@@ -141,20 +143,20 @@ def list_gateway_role_assigments(gateway: str | UUID) -> pd.DataFrame:
         uses_pagination=True,
     )

-    …
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            …
-    if …
-        df = pd.…
+            rows.append(
+                {
+                    "Gateway Role Assignment Id": v.get("id"),
+                    "Principal Id": v.get("principal", {}).get("id"),
+                    "Principal Type": v.get("principal", {}).get("type"),
+                    "Role": v.get("role"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

@@ -274,21 +276,21 @@ def list_gateway_members(gateway: str | UUID) -> pd.DataFrame:
         request=f"/v1/gateways/{gateway_id}/members", client="fabric_sp"
     )

-    …
+    rows = []
     for v in response.json().get("value", []):
-        …
-    if …
-        df = pd.…
+        rows.append(
+            {
+                "Member Id": v.get("id"),
+                "Member Name": v.get("displayName"),
+                "Public Key Exponent": v.get("publicKey", {}).get("exponent"),
+                "Public Key Modulus": v.get("publicKey", {}).get("modulus"),
+                "Version": v.get("version"),
+                "Enabled": v.get("enabled"),
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
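The same refactor repeats across the list functions in this release: instead of growing the DataFrame row by row, each function now collects plain dicts in a `rows` list and builds the frame once. A minimal before/after sketch — the removed side is truncated in this diff view, so the 0.11.1 body shown here is an assumption based on the surviving fragments (`if`, `df = pd.`) and the shape of the new code:

import pandas as pd

columns = {"Gateway Name": "string", "Gateway Id": "string"}  # illustrative column map
values = [{"displayName": "gw1", "id": "1"}, {"displayName": "gw2", "id": "2"}]

# Assumed 0.11.1 style: concatenate one single-row frame per record.
# Each pd.concat copies the accumulated frame, so this is O(n^2) overall.
df = pd.DataFrame(columns=list(columns.keys()))
for v in values:
    new_data = {"Gateway Name": v.get("displayName"), "Gateway Id": v.get("id")}
    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

# 0.11.2 style: accumulate dicts, construct the frame once at the end.
rows = []
for v in values:
    rows.append({"Gateway Name": v.get("displayName"), "Gateway Id": v.get("id")})
if rows:
    df = pd.DataFrame(rows, columns=list(columns.keys()))

The `if rows:` guard means an empty API response leaves `df` as whatever empty frame was built earlier (presumably via the `_create_dataframe` helper these modules import), so callers still get a correctly-shaped empty DataFrame.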
sempy_labs/_generate_semantic_model.py
CHANGED

@@ -4,7 +4,7 @@ import json
 import os
 from typing import Optional, List
 from sempy._utils._log import log
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     _conv_b64,
@@ -13,9 +13,9 @@ from sempy_labs._helper_functions import (
     _mount,
     resolve_workspace_id,
 )
-from sempy_labs.lakehouse._lakehouse import lakehouse_attached
+from .lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
-from sempy_labs._refresh_semantic_model import refresh_semantic_model
+from ._refresh_semantic_model import refresh_semantic_model
 from uuid import UUID

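Aside from the DataFrame refactor, the other change threaded through this release is switching intra-package imports from absolute to relative form. Inside the package both forms resolve to the same module; a short illustration using a helper name taken from the hunks above (this only runs from within a module of the package, not as a standalone script):

# Inside any module of the sempy_labs package, these are equivalent:
from sempy_labs._helper_functions import resolve_workspace_id  # 0.11.1: absolute
from ._helper_functions import resolve_workspace_id            # 0.11.2: relative

# The relative form binds to the containing package rather than the top-level
# name, so it keeps working if the package is vendored or renamed.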
sempy_labs/_git.py
CHANGED
sempy_labs/_graphQL.py
CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 from uuid import UUID
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     _base_api,
     _create_dataframe,
     resolve_workspace_id,
@@ -47,18 +47,19 @@ def list_graphql_apis(workspace: Optional[str | UUID]) -> pd.DataFrame:
         client="fabric_sp",
     )

-    …
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            …
+            rows.append(
+                {
+                    "GraphQL API Name": v.get("displayName"),
+                    "GraphQL API Id": v.get("id"),
+                    "Description": v.get("description"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

sempy_labs/_job_scheduler.py
CHANGED
@@ -1,7 +1,7 @@
 from sempy._utils._log import log
 import pandas as pd
 from typing import Optional, List
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_item_name_and_id,
     _update_dataframe_datatypes,
@@ -67,27 +67,28 @@ def list_item_job_instances(
     if not responses[0].get("value"):
         return df

-    …
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             fail = v.get("failureReason", {})
-            …
+            rows.append(
+                {
+                    "Job Instance Id": v.get("id"),
+                    "Item Name": item_name,
+                    "Item Id": v.get("itemId"),
+                    "Item Type": type,
+                    "Job Type": v.get("jobType"),
+                    "Invoke Type": v.get("invokeType"),
+                    "Status": v.get("status"),
+                    "Root Activity Id": v.get("rootActivityId"),
+                    "Start Time UTC": v.get("startTimeUtc"),
+                    "End Time UTC": v.get("endTimeUtc"),
+                    "Error Message": fail.get("message") if fail is not None else "",
+                }
+            )

-    if …
-        df = pd.…
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
@@ -111,24 +112,25 @@ def _get_item_job_instance(url: str) -> pd.DataFrame:

     response = _base_api(request=url)

-    …
+    rows = []
     for v in response.json().get("value", []):
         fail = v.get("failureReason", {})
-        …
+        rows.append(
+            {
+                "Job Instance Id": v.get("id"),
+                "Item Id": v.get("itemId"),
+                "Job Type": v.get("jobType"),
+                "Invoke Type": v.get("invokeType"),
+                "Status": v.get("status"),
+                "Root Activity Id": v.get("rootActivityId"),
+                "Start Time UTC": v.get("startTimeUtc"),
+                "End Time UTC": v.get("endTimeUtc"),
+                "Error Message": fail.get("message") if fail is not None else "",
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
@@ -190,29 +192,29 @@ def list_item_schedules(
         request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules"
     )

-    …
+    rows = []
     for v in response.json().get("value", []):
         config = v.get("configuration", {})
         own = v.get("owner", {})
-        …
-    if …
-        df = pd.…
+        rows.append(
+            {
+                "Job Schedule Id": v.get("id"),
+                "Enabled": v.get("enabled"),
+                "Created Date Time": v.get("createdDateTime"),
+                "Start Date Time": config.get("startDateTime"),
+                "End Date Time": config.get("endDateTime"),
+                "Local Time Zone Id": config.get("localTimeZoneId"),
+                "Type": config.get("type"),
+                "Interval": config.get("interval"),
+                "Weekdays": config.get("weekdays"),
+                "Times": config.get("times"),
+                "Owner Id": own.get("id"),
+                "Owner Type": own.get("type"),
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
sempy_labs/_kql_databases.py
CHANGED
@@ -1,7 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
-    resolve_workspace_name_and_id,
+from ._helper_functions import (
     _base_api,
     _create_dataframe,
     delete_item,
@@ -55,24 +54,24 @@ def list_kql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         client="fabric_sp",
     )

-    …
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             prop = v.get("properties", {})
-            …
-    if …
-        df = pd.…
+            rows.append(
+                {
+                    "KQL Database Name": v.get("displayName"),
+                    "KQL Database Id": v.get("id"),
+                    "Description": v.get("description"),
+                    "Parent Eventhouse Item Id": prop.get("parentEventhouseItemId"),
+                    "Query Service URI": prop.get("queryServiceUri"),
+                    "Ingestion Service URI": prop.get("ingestionServiceUri"),
+                    "Database Type": prop.get("databaseType"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

sempy_labs/_kql_querysets.py
CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
@@ -45,18 +45,19 @@ def list_kql_querysets(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         request=f"v1/workspaces/{workspace_id}/kqlQuerysets", uses_pagination=True
     )

-    …
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            …
+            rows.append(
+                {
+                    "KQL Queryset Name": v.get("displayName"),
+                    "KQL Queryset Id": v.get("id"),
+                    "Description": v.get("description"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

sempy_labs/_kusto.py
CHANGED
@@ -5,8 +5,8 @@ from sempy._utils._log import log
 import sempy_labs._icons as icons
 from typing import Optional
 from uuid import UUID
-from sempy_labs._kql_databases import _resolve_cluster_uri
-from sempy_labs._helper_functions import resolve_item_id
+from ._kql_databases import _resolve_cluster_uri
+from ._helper_functions import resolve_item_id


 @log
sempy_labs/_list_functions.py
CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     _is_valid_uuid,
     _base_api,
sempy_labs/_managed_private_endpoints.py
CHANGED

@@ -118,23 +118,26 @@ def list_managed_private_endpoints(
         client="fabric_sp",
     )

-    …
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             conn = v.get("connectionState", {})
-            …
+            rows.append(
+                {
+                    "Managed Private Endpoint Name": v.get("name"),
+                    "Managed Private Endpoint Id": v.get("id"),
+                    "Target Private Link Resource Id": v.get(
+                        "targetPrivateLinkResourceId"
+                    ),
+                    "Provisioning State": v.get("provisioningState"),
+                    "Connection Status": conn.get("status"),
+                    "Connection Description": conn.get("description"),
+                    "Target Subresource Type": v.get("targetSubresourceType"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

sempy_labs/_mirrored_databases.py
CHANGED

@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     _update_dataframe_datatypes,
     _base_api,
@@ -58,25 +58,26 @@ def list_mirrored_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         client="fabric_sp",
     )

-    …
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             prop = v.get("properties", {})
             sql = prop.get("sqlEndpointProperties", {})
-            …
+            rows.append(
+                {
+                    "Mirrored Database Name": v.get("displayName"),
+                    "Mirrored Database Id": v.get("id"),
+                    "Description": v.get("description"),
+                    "OneLake Tables Path": prop.get("oneLakeTablesPath"),
+                    "SQL Endpoint Connection String": sql.get("connectionString"),
+                    "SQL Endpoint Id": sql.get("id"),
+                    "Provisioning Status": sql.get("provisioningStatus"),
+                    "Default Schema": prop.get("defaultSchema"),
+                }
+            )

-    if …
-        df = pd.…
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

sempy_labs/_mirrored_warehouses.py
CHANGED

@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
@@ -43,17 +43,18 @@ def list_mirrored_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         uses_pagination=True,
     )

-    …
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            …
+            rows.append(
+                {
+                    "Mirrored Warehouse Name": v.get("displayName"),
+                    "Mirrored Warehouse Id": v.get("id"),
+                    "Description": v.get("description"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
sempy_labs/_ml_experiments.py
CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_id,
     _base_api,
     delete_item,
@@ -46,22 +46,23 @@ def list_ml_experiments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         uses_pagination=True,
     )

-    …
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             model_id = v.get("id")
             modelName = v.get("displayName")
             desc = v.get("description")

-            …
+            rows.append(
+                {
+                    "ML Experiment Name": modelName,
+                    "ML Experiment Id": model_id,
+                    "Description": desc,
+                }
+            )

-    if …
-        df = pd.…
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
