semantic-link-labs 0.11.0__py3-none-any.whl → 0.11.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/METADATA +6 -4
- semantic_link_labs-0.11.2.dist-info/RECORD +210 -0
- sempy_labs/__init__.py +56 -56
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_ai.py +1 -1
- sempy_labs/_capacities.py +2 -2
- sempy_labs/_capacity_migration.py +5 -5
- sempy_labs/_clear_cache.py +1 -1
- sempy_labs/_connections.py +2 -2
- sempy_labs/_dashboards.py +16 -16
- sempy_labs/_data_pipelines.py +1 -1
- sempy_labs/_dataflows.py +101 -26
- sempy_labs/_dax.py +3 -3
- sempy_labs/_dax_query_view.py +1 -1
- sempy_labs/_delta_analyzer.py +4 -4
- sempy_labs/_delta_analyzer_history.py +1 -1
- sempy_labs/_deployment_pipelines.py +1 -1
- sempy_labs/_environments.py +22 -21
- sempy_labs/_eventhouses.py +12 -11
- sempy_labs/_eventstreams.py +12 -11
- sempy_labs/_external_data_shares.py +23 -22
- sempy_labs/_gateways.py +47 -45
- sempy_labs/_generate_semantic_model.py +3 -3
- sempy_labs/_git.py +1 -1
- sempy_labs/_graphQL.py +12 -11
- sempy_labs/_job_scheduler.py +56 -54
- sempy_labs/_kql_databases.py +16 -17
- sempy_labs/_kql_querysets.py +12 -11
- sempy_labs/_kusto.py +2 -2
- sempy_labs/_list_functions.py +1 -1
- sempy_labs/_managed_private_endpoints.py +18 -15
- sempy_labs/_mirrored_databases.py +16 -15
- sempy_labs/_mirrored_warehouses.py +12 -11
- sempy_labs/_ml_experiments.py +11 -10
- sempy_labs/_ml_models.py +11 -10
- sempy_labs/_model_auto_build.py +3 -3
- sempy_labs/_model_bpa.py +5 -5
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +1 -1
- sempy_labs/_mounted_data_factories.py +12 -12
- sempy_labs/_notebooks.py +1 -1
- sempy_labs/_one_lake_integration.py +1 -1
- sempy_labs/_query_scale_out.py +1 -1
- sempy_labs/_refresh_semantic_model.py +1 -1
- sempy_labs/_semantic_models.py +30 -28
- sempy_labs/_spark.py +1 -1
- sempy_labs/_sql.py +1 -1
- sempy_labs/_sql_endpoints.py +12 -11
- sempy_labs/_sqldatabase.py +15 -15
- sempy_labs/_tags.py +11 -10
- sempy_labs/_translations.py +1 -1
- sempy_labs/_user_delegation_key.py +2 -2
- sempy_labs/_variable_libraries.py +13 -12
- sempy_labs/_vertipaq.py +3 -3
- sempy_labs/_vpax.py +1 -1
- sempy_labs/_warehouses.py +15 -14
- sempy_labs/_workloads.py +1 -1
- sempy_labs/_workspace_identity.py +1 -1
- sempy_labs/_workspaces.py +14 -13
- sempy_labs/admin/__init__.py +18 -18
- sempy_labs/admin/_activities.py +46 -46
- sempy_labs/admin/_apps.py +28 -26
- sempy_labs/admin/_artifacts.py +15 -15
- sempy_labs/admin/_basic_functions.py +1 -2
- sempy_labs/admin/_capacities.py +86 -82
- sempy_labs/admin/_dataflows.py +2 -2
- sempy_labs/admin/_datasets.py +50 -48
- sempy_labs/admin/_domains.py +25 -19
- sempy_labs/admin/_external_data_share.py +24 -22
- sempy_labs/admin/_git.py +17 -17
- sempy_labs/admin/_items.py +47 -45
- sempy_labs/admin/_reports.py +61 -58
- sempy_labs/admin/_scanner.py +2 -2
- sempy_labs/admin/_shared.py +18 -18
- sempy_labs/admin/_tags.py +2 -2
- sempy_labs/admin/_tenant.py +57 -51
- sempy_labs/admin/_users.py +16 -15
- sempy_labs/admin/_workspaces.py +2 -2
- sempy_labs/directlake/__init__.py +12 -12
- sempy_labs/directlake/_directlake_schema_compare.py +3 -3
- sempy_labs/directlake/_directlake_schema_sync.py +9 -7
- sempy_labs/directlake/_dl_helper.py +1 -1
- sempy_labs/directlake/_generate_shared_expression.py +1 -1
- sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
- sempy_labs/directlake/_guardrails.py +1 -1
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +3 -3
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -1
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -3
- sempy_labs/directlake/_update_directlake_partition_entity.py +4 -4
- sempy_labs/directlake/_warm_cache.py +3 -3
- sempy_labs/graph/__init__.py +3 -3
- sempy_labs/graph/_groups.py +81 -78
- sempy_labs/graph/_teams.py +21 -21
- sempy_labs/graph/_users.py +111 -10
- sempy_labs/lakehouse/__init__.py +7 -7
- sempy_labs/lakehouse/_blobs.py +30 -30
- sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
- sempy_labs/lakehouse/_get_lakehouse_tables.py +29 -27
- sempy_labs/lakehouse/_helper.py +30 -2
- sempy_labs/lakehouse/_lakehouse.py +2 -2
- sempy_labs/lakehouse/_livy_sessions.py +47 -42
- sempy_labs/lakehouse/_shortcuts.py +22 -21
- sempy_labs/migration/__init__.py +8 -8
- sempy_labs/migration/_create_pqt_file.py +2 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -3
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +3 -4
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -2
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +4 -4
- sempy_labs/migration/_migration_validation.py +1 -2
- sempy_labs/migration/_refresh_calc_tables.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +40 -40
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +1 -1
- sempy_labs/report/__init__.py +10 -10
- sempy_labs/report/_download_report.py +2 -2
- sempy_labs/report/_export_report.py +2 -2
- sempy_labs/report/_generate_report.py +1 -1
- sempy_labs/report/_paginated.py +1 -1
- sempy_labs/report/_report_bpa.py +4 -3
- sempy_labs/report/_report_functions.py +3 -3
- sempy_labs/report/_report_list_functions.py +3 -3
- sempy_labs/report/_report_rebind.py +1 -1
- sempy_labs/report/_reportwrapper.py +247 -249
- sempy_labs/report/_save_report.py +3 -3
- sempy_labs/theme/_org_themes.py +35 -1
- sempy_labs/tom/__init__.py +1 -1
- sempy_labs/tom/_model.py +23 -20
- semantic_link_labs-0.11.0.dist-info/RECORD +0 -210
- {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/top_level.txt +0 -0
sempy_labs/_ml_models.py
CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_id,
     _base_api,
     delete_item,
@@ -46,22 +46,23 @@ def list_ml_models(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         uses_pagination=True,
     )

-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             model_id = v.get("id")
             modelName = v.get("displayName")
             desc = v.get("description")

-            new_data = {
-                "ML Model Name": modelName,
-                "ML Model Id": model_id,
-                "Description": desc,
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
+            rows.append(
+                {
+                    "ML Model Name": modelName,
+                    "ML Model Id": model_id,
+                    "Description": desc,
+                }
+            )

-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
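The pattern above repeats throughout this release: every `list_*` function drops the old approach of building one single-row `pd.DataFrame(..., index=[0])` per record and concatenating them, in favor of collecting plain dicts and constructing the frame once. A minimal sketch of the two approaches, using made-up payload data and a stand-in for the module-level `columns` schema dict:

import pandas as pd

# Made-up API payload standing in for the paginated Fabric responses.
values = [
    {"id": "a1", "displayName": "Churn", "description": "Churn model"},
    {"id": "b2", "displayName": "Forecast", "description": "Sales forecast"},
]
# Stand-in for the module-level `columns` schema mapping.
columns = {"ML Model Name": "string", "ML Model Id": "string", "Description": "string"}

# Old approach: one single-row DataFrame per record, concatenated at the end.
dfs = [
    pd.DataFrame(
        {
            "ML Model Name": v["displayName"],
            "ML Model Id": v["id"],
            "Description": v["description"],
        },
        index=[0],
    )
    for v in values
]
df_old = pd.concat(dfs, ignore_index=True)

# New approach: collect plain dicts, build one DataFrame in a single call.
rows = [
    {
        "ML Model Name": v["displayName"],
        "ML Model Id": v["id"],
        "Description": v["description"],
    }
    for v in values
]
df_new = pd.DataFrame(rows, columns=list(columns.keys()))

assert df_old.equals(df_new)

The single constructor avoids allocating an intermediate one-row DataFrame per record, and the explicit `columns=list(columns.keys())` pins the column order to the declared schema rather than to whatever keys happen to appear in the payload.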
sempy_labs/_model_auto_build.py
CHANGED
@@ -1,8 +1,8 @@
 import sempy.fabric as fabric
 import pandas as pd
-from sempy_labs.tom import connect_semantic_model
-from sempy_labs._generate_semantic_model import create_blank_semantic_model
-from sempy_labs.directlake._generate_shared_expression import generate_shared_expression
+from .tom import connect_semantic_model
+from ._generate_semantic_model import create_blank_semantic_model
+from .directlake._generate_shared_expression import generate_shared_expression
 from typing import Optional
 from sempy._utils._log import log
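The other change repeated across almost every module in this release is the switch from absolute `sempy_labs.…` imports to package-relative ones. A minimal sketch of the equivalence, with a hypothetical package name in place of sempy_labs:

# mypkg/_helper_functions.py  (hypothetical layout mirroring sempy_labs)
def resolve_workspace_id(workspace=None):
    ...

# mypkg/_model_auto_build.py — both lines bind the same function; the
# relative form resolves against the containing package, so it keeps
# working if the package is vendored or renamed.
from mypkg._helper_functions import resolve_workspace_id  # old, absolute
from ._helper_functions import resolve_workspace_id       # new, relative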
sempy_labs/_model_bpa.py
CHANGED
@@ -3,8 +3,8 @@ import pandas as pd
 import warnings
 import datetime
 from IPython.display import display, HTML
-from sempy_labs._model_dependencies import get_model_calc_dependencies
-from sempy_labs._helper_functions import (
+from ._model_dependencies import get_model_calc_dependencies
+from ._helper_functions import (
     format_dax_object_name,
     create_relationship_name,
     save_as_delta_table,
@@ -15,9 +15,9 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _create_spark_session,
 )
-from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
-from sempy_labs.tom import connect_semantic_model
-from sempy_labs._model_bpa_rules import model_bpa_rules
+from .lakehouse import get_lakehouse_tables, lakehouse_attached
+from .tom import connect_semantic_model
+from ._model_bpa_rules import model_bpa_rules
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
sempy_labs/_model_bpa_bulk.py
CHANGED
@@ -1,7 +1,7 @@
 import sempy.fabric as fabric
 import pandas as pd
 import datetime
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     save_as_delta_table,
     resolve_workspace_capacity,
     retry,
@@ -9,11 +9,11 @@ from sempy_labs._helper_functions import (
     resolve_workspace_id,
     resolve_lakehouse_name_and_id,
 )
-from sempy_labs.lakehouse import (
+from .lakehouse import (
     get_lakehouse_tables,
     lakehouse_attached,
 )
-from sempy_labs._model_bpa import run_model_bpa
+from ._model_bpa import run_model_bpa
 from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons
sempy_labs/_mounted_data_factories.py
CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
@@ -49,19 +49,19 @@ def list_mounted_data_factories(
         uses_pagination=True,
     )

-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-
-            new_data = {
-                "Mounted Data Factory Name": v.get("displayName"),
-                "Mounted Data Factory Id": v.get("id"),
-                "Description": v.get("description"),
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "Mounted Data Factory Name": v.get("displayName"),
+                    "Mounted Data Factory Id": v.get("id"),
+                    "Description": v.get("description"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
sempy_labs/_notebooks.py
CHANGED
@@ -2,7 +2,7 @@ import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy._utils._log import log
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     resolve_workspace_id,
sempy_labs/_query_scale_out.py
CHANGED
sempy_labs/_semantic_models.py
CHANGED
@@ -1,7 +1,7 @@
 from uuid import UUID
 from typing import Optional, List
 import pandas as pd
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     _create_dataframe,
     _base_api,
     _update_dataframe_datatypes,
@@ -298,39 +298,41 @@ def list_semantic_model_datasources(
         client="fabric_sp",
     )

-    dfs = []
+    rows = []
     for item in response.json().get("value", []):
         ds_type = item.get("datasourceType")
         conn_details = item.get("connectionDetails", {})
         ds_id = item.get("datasourceId")
         gateway_id = item.get("gatewayId")
         if expand_details:
-            new_data = {
-                "Datasource Type": ds_type,
-                "Connection Server": conn_details.get("server"),
-                "Connection Database": conn_details.get("database"),
-                "Connection Path": conn_details.get("path"),
-                "Connection Account": conn_details.get("account"),
-                "Connection Domain": conn_details.get("domain"),
-                "Connection Kind": conn_details.get("kind"),
-                "Connection Email Address": conn_details.get("emailAddress"),
-                "Connection URL": conn_details.get("url"),
-                "Connection Class Info": conn_details.get("classInfo"),
-                "Connection Login Server": conn_details.get("loginServer"),
-                "Datasource Id": ds_id,
-                "Gateway Id": gateway_id,
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
+            rows.append(
+                {
+                    "Datasource Type": ds_type,
+                    "Connection Server": conn_details.get("server"),
+                    "Connection Database": conn_details.get("database"),
+                    "Connection Path": conn_details.get("path"),
+                    "Connection Account": conn_details.get("account"),
+                    "Connection Domain": conn_details.get("domain"),
+                    "Connection Kind": conn_details.get("kind"),
+                    "Connection Email Address": conn_details.get("emailAddress"),
+                    "Connection URL": conn_details.get("url"),
+                    "Connection Class Info": conn_details.get("classInfo"),
+                    "Connection Login Server": conn_details.get("loginServer"),
+                    "Datasource Id": ds_id,
+                    "Gateway Id": gateway_id,
+                }
+            )
         else:
-            new_data = {
-                "Datasource Type": ds_type,
-                "Connection Details": conn_details,
-                "Datasource Id": ds_id,
-                "Gateway Id": gateway_id,
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "Datasource Type": ds_type,
+                    "Connection Details": conn_details,
+                    "Datasource Id": ds_id,
+                    "Gateway Id": gateway_id,
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
sempy_labs/_spark.py
CHANGED
sempy_labs/_sql.py
CHANGED
@@ -3,7 +3,7 @@ from typing import Optional, Union, List
 from sempy._utils._log import log
 import struct
 from itertools import chain, repeat
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_lakehouse_name_and_id,
     resolve_item_name_and_id,
     resolve_workspace_name_and_id,
sempy_labs/_sql_endpoints.py
CHANGED
@@ -1,7 +1,7 @@
 from typing import Optional, Literal
 from uuid import UUID
 import pandas as pd
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     _base_api,
     _create_dataframe,
     resolve_workspace_name_and_id,
@@ -44,19 +44,19 @@ def list_sql_endpoints(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         request=f"/v1/workspaces/{workspace_id}/sqlEndpoints", uses_pagination=True
     )

-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
+            rows.append(
+                {
+                    "SQL Endpoint Id": v.get("id"),
+                    "SQL Endpoint Name": v.get("displayName"),
+                    "Description": v.get("description"),
+                }
+            )

-            new_data = {
-                "SQL Endpoint Id": v.get("id"),
-                "SQL Endpoint Name": v.get("displayName"),
-                "Description": v.get("description"),
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

@@ -142,6 +142,7 @@ def refresh_sql_endpoint_metadata(
     result = _base_api(
         request=f"v1/workspaces/{workspace_id}/sqlEndpoints/{sql_endpoint_id}/refreshMetadata?preview=true",
         method="post",
+        client="fabric_sp",
         status_codes=[200, 202],
         lro_return_json=True,
         payload=payload,
sempy_labs/_sqldatabase.py
CHANGED
@@ -1,4 +1,4 @@
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
@@ -100,23 +100,23 @@ def list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         client="fabric_sp",
     )

-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             prop = v.get("properties", {})
-
-            new_data = {
-                "SQL Database Name": v.get("displayName"),
-                "SQL Database Id": v.get("id"),
-                "Description": v.get("description"),
-                "Connection Info": prop.get("connectionInfo"),
-                "Database Name": prop.get("databaseName"),
-                "Server FQDN": prop.get("serverFqdn"),
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "SQL Database Name": v.get("displayName"),
+                    "SQL Database Id": v.get("id"),
+                    "Description": v.get("description"),
+                    "Connection Info": prop.get("connectionInfo"),
+                    "Database Name": prop.get("databaseName"),
+                    "Server FQDN": prop.get("serverFqdn"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
sempy_labs/_tags.py
CHANGED
@@ -1,4 +1,4 @@
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     _base_api,
     _create_dataframe,
     _update_dataframe_datatypes,
@@ -40,17 +40,18 @@ def list_tags() -> pd.DataFrame:
         client="fabric_sp",
     )

-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            new_data = {
-                "Tag Name": v.get("displayName"),
-                "Tag Id": v.get("id"),
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "Tag Name": v.get("displayName"),
+                    "Tag Id": v.get("id"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
sempy_labs/_translations.py
CHANGED
@@ -1,4 +1,4 @@
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
sempy_labs/_variable_libraries.py
CHANGED
@@ -49,21 +49,22 @@ def list_variable_libraries(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         client="fabric_sp",
     )

-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             prop = v.get("properties", {})

-            new_data = {
-                "Variable Library Name": v.get("displayName"),
-                "Variable Library Id": v.get("id"),
-                "Description": v.get("description"),
-                "Active Value Set Name": prop.get("activeValueSetName"),
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "Variable Library Name": v.get("displayName"),
+                    "Variable Library Id": v.get("id"),
+                    "Description": v.get("description"),
+                    "Active Value Set Name": prop.get("activeValueSetName"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
sempy_labs/_vertipaq.py
CHANGED
@@ -6,7 +6,7 @@ import os
 import shutil
 import datetime
 import warnings
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     format_dax_object_name,
     save_as_delta_table,
     resolve_workspace_capacity,
@@ -17,8 +17,8 @@ from sempy_labs._helper_functions import (
     resolve_workspace_id,
     resolve_workspace_name,
 )
-from sempy_labs._list_functions import list_relationships, list_tables
-from sempy_labs.lakehouse import lakehouse_attached, get_lakehouse_tables
+from ._list_functions import list_relationships, list_tables
+from .lakehouse import lakehouse_attached, get_lakehouse_tables
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
sempy_labs/_vpax.py
CHANGED
@@ -6,7 +6,7 @@ import sys
 from pathlib import Path
 from typing import Optional
 from uuid import UUID
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     resolve_lakehouse_name_and_id,
sempy_labs/_warehouses.py
CHANGED
@@ -1,4 +1,4 @@
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
@@ -111,23 +111,24 @@ def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         client="fabric_sp",
     )

-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             prop = v.get("properties", {})

-            new_data = {
-                "Warehouse Name": v.get("displayName"),
-                "Warehouse Id": v.get("id"),
-                "Description": v.get("description"),
-                "Connection Info": prop.get("connectionInfo"),
-                "Created Date": prop.get("createdDate"),
-                "Last Updated Time": prop.get("lastUpdatedTime"),
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "Warehouse Name": v.get("displayName"),
+                    "Warehouse Id": v.get("id"),
+                    "Description": v.get("description"),
+                    "Connection Info": prop.get("connectionInfo"),
+                    "Created Date": prop.get("createdDate"),
+                    "Last Updated Time": prop.get("lastUpdatedTime"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
sempy_labs/_workloads.py
CHANGED
sempy_labs/_workspaces.py
CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_capacity_id,
     _base_api,
@@ -144,21 +144,22 @@ def list_workspace_users(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         client="fabric_sp",
     )

-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             p = v.get("principal", {})
-            new_data = {
-                "User Name": p.get("displayName"),
-                "User ID": p.get("id"),
-                "Type": p.get("type"),
-                "Role": v.get("role"),
-                "Email Address": p.get("userDetails", {}).get("userPrincipalName"),
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "User Name": p.get("displayName"),
+                    "User ID": p.get("id"),
+                    "Type": p.get("type"),
+                    "Role": v.get("role"),
+                    "Email Address": p.get("userDetails", {}).get("userPrincipalName"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
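None of these refactors changes the public signatures shown in the hunk headers; only the internal frame construction and import style differ. A hypothetical call from a notebook, assuming the functions are re-exported from the package root as the sempy_labs/__init__.py entry in the file list suggests:

import sempy_labs as labs

# Omitting workspace falls back to the default resolution, per the
# Optional[str | UUID] = None defaults in the hunk headers above.
users = labs.list_workspace_users()
endpoints = labs.list_sql_endpoints(workspace="My Workspace")  # name or UUID
print(users.columns.tolist())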