semantic-link-labs 0.11.0__py3-none-any.whl → 0.11.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/METADATA +6 -4
- semantic_link_labs-0.11.2.dist-info/RECORD +210 -0
- sempy_labs/__init__.py +56 -56
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_ai.py +1 -1
- sempy_labs/_capacities.py +2 -2
- sempy_labs/_capacity_migration.py +5 -5
- sempy_labs/_clear_cache.py +1 -1
- sempy_labs/_connections.py +2 -2
- sempy_labs/_dashboards.py +16 -16
- sempy_labs/_data_pipelines.py +1 -1
- sempy_labs/_dataflows.py +101 -26
- sempy_labs/_dax.py +3 -3
- sempy_labs/_dax_query_view.py +1 -1
- sempy_labs/_delta_analyzer.py +4 -4
- sempy_labs/_delta_analyzer_history.py +1 -1
- sempy_labs/_deployment_pipelines.py +1 -1
- sempy_labs/_environments.py +22 -21
- sempy_labs/_eventhouses.py +12 -11
- sempy_labs/_eventstreams.py +12 -11
- sempy_labs/_external_data_shares.py +23 -22
- sempy_labs/_gateways.py +47 -45
- sempy_labs/_generate_semantic_model.py +3 -3
- sempy_labs/_git.py +1 -1
- sempy_labs/_graphQL.py +12 -11
- sempy_labs/_job_scheduler.py +56 -54
- sempy_labs/_kql_databases.py +16 -17
- sempy_labs/_kql_querysets.py +12 -11
- sempy_labs/_kusto.py +2 -2
- sempy_labs/_list_functions.py +1 -1
- sempy_labs/_managed_private_endpoints.py +18 -15
- sempy_labs/_mirrored_databases.py +16 -15
- sempy_labs/_mirrored_warehouses.py +12 -11
- sempy_labs/_ml_experiments.py +11 -10
- sempy_labs/_ml_models.py +11 -10
- sempy_labs/_model_auto_build.py +3 -3
- sempy_labs/_model_bpa.py +5 -5
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +1 -1
- sempy_labs/_mounted_data_factories.py +12 -12
- sempy_labs/_notebooks.py +1 -1
- sempy_labs/_one_lake_integration.py +1 -1
- sempy_labs/_query_scale_out.py +1 -1
- sempy_labs/_refresh_semantic_model.py +1 -1
- sempy_labs/_semantic_models.py +30 -28
- sempy_labs/_spark.py +1 -1
- sempy_labs/_sql.py +1 -1
- sempy_labs/_sql_endpoints.py +12 -11
- sempy_labs/_sqldatabase.py +15 -15
- sempy_labs/_tags.py +11 -10
- sempy_labs/_translations.py +1 -1
- sempy_labs/_user_delegation_key.py +2 -2
- sempy_labs/_variable_libraries.py +13 -12
- sempy_labs/_vertipaq.py +3 -3
- sempy_labs/_vpax.py +1 -1
- sempy_labs/_warehouses.py +15 -14
- sempy_labs/_workloads.py +1 -1
- sempy_labs/_workspace_identity.py +1 -1
- sempy_labs/_workspaces.py +14 -13
- sempy_labs/admin/__init__.py +18 -18
- sempy_labs/admin/_activities.py +46 -46
- sempy_labs/admin/_apps.py +28 -26
- sempy_labs/admin/_artifacts.py +15 -15
- sempy_labs/admin/_basic_functions.py +1 -2
- sempy_labs/admin/_capacities.py +86 -82
- sempy_labs/admin/_dataflows.py +2 -2
- sempy_labs/admin/_datasets.py +50 -48
- sempy_labs/admin/_domains.py +25 -19
- sempy_labs/admin/_external_data_share.py +24 -22
- sempy_labs/admin/_git.py +17 -17
- sempy_labs/admin/_items.py +47 -45
- sempy_labs/admin/_reports.py +61 -58
- sempy_labs/admin/_scanner.py +2 -2
- sempy_labs/admin/_shared.py +18 -18
- sempy_labs/admin/_tags.py +2 -2
- sempy_labs/admin/_tenant.py +57 -51
- sempy_labs/admin/_users.py +16 -15
- sempy_labs/admin/_workspaces.py +2 -2
- sempy_labs/directlake/__init__.py +12 -12
- sempy_labs/directlake/_directlake_schema_compare.py +3 -3
- sempy_labs/directlake/_directlake_schema_sync.py +9 -7
- sempy_labs/directlake/_dl_helper.py +1 -1
- sempy_labs/directlake/_generate_shared_expression.py +1 -1
- sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
- sempy_labs/directlake/_guardrails.py +1 -1
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +3 -3
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -1
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -3
- sempy_labs/directlake/_update_directlake_partition_entity.py +4 -4
- sempy_labs/directlake/_warm_cache.py +3 -3
- sempy_labs/graph/__init__.py +3 -3
- sempy_labs/graph/_groups.py +81 -78
- sempy_labs/graph/_teams.py +21 -21
- sempy_labs/graph/_users.py +111 -10
- sempy_labs/lakehouse/__init__.py +7 -7
- sempy_labs/lakehouse/_blobs.py +30 -30
- sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
- sempy_labs/lakehouse/_get_lakehouse_tables.py +29 -27
- sempy_labs/lakehouse/_helper.py +30 -2
- sempy_labs/lakehouse/_lakehouse.py +2 -2
- sempy_labs/lakehouse/_livy_sessions.py +47 -42
- sempy_labs/lakehouse/_shortcuts.py +22 -21
- sempy_labs/migration/__init__.py +8 -8
- sempy_labs/migration/_create_pqt_file.py +2 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -3
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +3 -4
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -2
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +4 -4
- sempy_labs/migration/_migration_validation.py +1 -2
- sempy_labs/migration/_refresh_calc_tables.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +40 -40
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +1 -1
- sempy_labs/report/__init__.py +10 -10
- sempy_labs/report/_download_report.py +2 -2
- sempy_labs/report/_export_report.py +2 -2
- sempy_labs/report/_generate_report.py +1 -1
- sempy_labs/report/_paginated.py +1 -1
- sempy_labs/report/_report_bpa.py +4 -3
- sempy_labs/report/_report_functions.py +3 -3
- sempy_labs/report/_report_list_functions.py +3 -3
- sempy_labs/report/_report_rebind.py +1 -1
- sempy_labs/report/_reportwrapper.py +247 -249
- sempy_labs/report/_save_report.py +3 -3
- sempy_labs/theme/_org_themes.py +35 -1
- sempy_labs/tom/__init__.py +1 -1
- sempy_labs/tom/_model.py +23 -20
- semantic_link_labs-0.11.0.dist-info/RECORD +0 -210
- {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/top_level.txt +0 -0
sempy_labs/admin/_shared.py
CHANGED
@@ -1,5 +1,5 @@
 import pandas as pd
-from
+from .._helper_functions import (
     _base_api,
     _create_dataframe,
 )
@@ -57,26 +57,26 @@ def list_widely_shared_artifacts(
         uses_pagination=True,
     )

-
+    rows = []
     for r in responses:
         for v in r.get("ArtifactAccessEntities", []):
             sharer = v.get("sharer", {})
-
-
-
-
-
-
-
-
-
-
-
-
+            rows.append(
+                {
+                    "Artifact Id": v.get("artifactId"),
+                    "Artifact Name": v.get("displayName"),
+                    "Artifact Type": v.get("artifactType"),
+                    "Access Right": v.get("accessRight"),
+                    "Share Type": v.get("shareType"),
+                    "Sharer Name": sharer.get("displayName"),
+                    "Sharer Email Address": sharer.get("emailAddress"),
+                    "Sharer Identifier": sharer.get("identifier"),
+                    "Sharer Graph Id": sharer.get("graphId"),
+                    "Sharer Principal Type": sharer.get("principalType"),
+                }
+            )

-
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
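
The hunk above replaces per-row DataFrame accumulation (repeated pd.concat over a list of frames) with a plain list of dicts that is materialized once at the end. Below is a minimal, self-contained sketch of that pattern; the columns dict and the payload are hypothetical stand-ins, not the library's actual data or helpers:

import pandas as pd

# Hypothetical column schema, mirroring how the module keys its output columns.
columns = {"Artifact Id": "string", "Artifact Name": "string", "Access Right": "string"}

# Hypothetical API payload.
payload = [
    {"artifactId": "1", "displayName": "Sales", "accessRight": "Read"},
    {"artifactId": "2", "displayName": "Finance", "accessRight": "Write"},
]

rows = []
for v in payload:
    rows.append(
        {
            "Artifact Id": v.get("artifactId"),
            "Artifact Name": v.get("displayName"),
            "Access Right": v.get("accessRight"),
        }
    )

# Build the frame once; growing a frame with concat inside the loop copies it on every iteration.
df = pd.DataFrame(rows, columns=list(columns.keys()))
print(df)

Constructing the DataFrame a single time from rows avoids the quadratic copying behaviour of repeated concat calls and keeps the column order pinned to the columns dict.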
sempy_labs/admin/_tags.py
CHANGED
@@ -1,9 +1,9 @@
-from
+from .._helper_functions import (
     _base_api,
     _is_valid_uuid,
 )
 from uuid import UUID
-from
+from .._tags import list_tags
 import sempy_labs._icons as icons
 from typing import List
 from sempy._utils._log import log
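
This file, like most modules in the list above, swaps absolute sempy_labs.* imports for package-relative ones (".." walks up one package level). A runnable sketch of the equivalence, using a hypothetical package named mypkg rather than sempy_labs itself:

import sys
import tempfile
import pathlib

# Build a throwaway package: mypkg/_helper_functions.py and mypkg/admin/_tags.py.
root = pathlib.Path(tempfile.mkdtemp())
(root / "mypkg" / "admin").mkdir(parents=True)
(root / "mypkg" / "__init__.py").write_text("")
(root / "mypkg" / "admin" / "__init__.py").write_text("")
(root / "mypkg" / "_helper_functions.py").write_text("def _base_api():\n    return 'called'\n")
# New style, as in sempy_labs/admin/_tags.py: relative import into the parent package.
(root / "mypkg" / "admin" / "_tags.py").write_text("from .._helper_functions import _base_api\n")

sys.path.insert(0, str(root))
from mypkg.admin._tags import _base_api  # the relative import resolved inside the package

print(_base_api())  # -> called

Inside the package, "from .._helper_functions import ..." resolves to the same module as the old absolute form; the relative spelling simply stops hard-coding the top-level package name.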
sempy_labs/admin/_tenant.py
CHANGED
@@ -1,4 +1,4 @@
-from
+from .._helper_functions import (
     _update_dataframe_datatypes,
     _base_api,
     _create_dataframe,
@@ -6,7 +6,7 @@ from sempy_labs._helper_functions import (
 from sempy._utils._log import log
 import pandas as pd
 from uuid import UUID
-from
+from ._capacities import _resolve_capacity_name_and_id
 import sempy_labs._icons as icons
 from typing import Optional, List

@@ -38,20 +38,21 @@ def list_tenant_settings() -> pd.DataFrame:

     response = _base_api(request="/v1/admin/tenantsettings", client="fabric_sp")

-
+    rows = []
     for i in response.json().get("value", []):
-
-
-
-
-
-
-
-
-
+        rows.append(
+            {
+                "Setting Name": i.get("settingName"),
+                "Title": i.get("title"),
+                "Enabled": i.get("enabled"),
+                "Can Specify Security Groups": i.get("canSpecifySecurityGroups"),
+                "Tenant Setting Group": i.get("tenantSettingGroup"),
+                "Enabled Security Groups": [i.get("enabledSecurityGroups", [])],
+            }
+        )

-    if
-    df = pd.
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
@@ -410,28 +411,31 @@ def list_workspaces_tenant_settings_overrides() -> pd.DataFrame:
         uses_pagination=True,
     )

+    rows = []
     for r in responses:
         for v in r.get("value", []):
             workspace_id = v.get("id")
             for setting in v.get("tenantSettings", []):
-
-
-
-
-
-
-"
-
-
-
-
-
-
-
-
-
-
-
+                rows.append(
+                    {
+                        "Workspace Id": workspace_id,
+                        "Setting Name": setting.get("settingName"),
+                        "Title": setting.get("title"),
+                        "Enabled": setting.get("enabled"),
+                        "Can Specify Security Groups": setting.get(
+                            "canSpecifySecurityGroups"
+                        ),
+                        "Enabled Security Groups": [
+                            setting.get("enabledSecurityGroups", [])
+                        ],
+                        "Tenant Setting Group": setting.get("tenantSettingGroup"),
+                        "Delegated From": setting.get("delegatedFrom"),
+                    }
+                )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df

@@ -470,28 +474,30 @@ def list_domain_tenant_settings_overrides() -> pd.DataFrame:
         uses_pagination=True,
     )

+    rows = []
     for r in responses:
         for v in r.get("value", []):
             domain_id = v.get("id")
             for setting in v.get("tenantSettings", []):
-
-
-
-
-
-
-"
-
-
-
-
-
-
-
-
-
-
-
-
+                rows.append(
+                    {
+                        "Domain Id": domain_id,
+                        "Setting Name": setting.get("settingName"),
+                        "Title": setting.get("title"),
+                        "Enabled": setting.get("enabled"),
+                        "Can Specify Security Groups": setting.get(
+                            "canSpecifySecurityGroups"
+                        ),
+                        "Enabled Security Groups": [
+                            setting.get("enabledSecurityGroups", [])
+                        ],
+                        "Tenant Setting Group": setting.get("tenantSettingGroup"),
+                        "Delegated To Workspace": setting.get("delegateToWorkspace"),
+                        "Delegated From": setting.get("delegatedFrom"),
+                    }
+                )
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
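
The two overrides hunks above flatten a nested response (each workspace or domain carrying a list of tenant settings) into one row per setting, then build and type the DataFrame only when at least one row exists. A self-contained sketch of that flattening, with a hypothetical payload and column map standing in for the real ones:

import pandas as pd

# Hypothetical response shaped like the workspace-overrides payload used above.
responses = [
    {
        "value": [
            {
                "id": "ws-1",
                "tenantSettings": [
                    {"settingName": "ExportToExcel", "title": "Export to Excel", "enabled": True},
                    {"settingName": "PublishToWeb", "title": "Publish to web", "enabled": False},
                ],
            }
        ]
    }
]

# Hypothetical column map; only its keys (the column order) matter for this sketch.
columns = {"Workspace Id": None, "Setting Name": None, "Title": None, "Enabled": None}

rows = []
for r in responses:
    for v in r.get("value", []):
        workspace_id = v.get("id")
        for setting in v.get("tenantSettings", []):
            rows.append(
                {
                    "Workspace Id": workspace_id,
                    "Setting Name": setting.get("settingName"),
                    "Title": setting.get("title"),
                    "Enabled": setting.get("enabled"),
                }
            )

if rows:
    df = pd.DataFrame(rows, columns=list(columns.keys()))
else:
    df = pd.DataFrame(columns=list(columns.keys()))  # zero rows, same column order
print(df)

Guarding on rows keeps the empty case as a zero-row frame with the expected columns, which is why the dtype-casting call can sit inside the "if rows:" branch in the new code.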
sempy_labs/admin/_users.py
CHANGED
@@ -1,4 +1,4 @@
-from
+from .._helper_functions import (
     _base_api,
     _create_dataframe,
     _update_dataframe_datatypes,
@@ -45,22 +45,23 @@ def list_access_entities(
         uses_pagination=True,
     )

-
+    rows = []
     for r in responses:
         for v in r.get("accessEntities", []):
-
-
-
-
-
-
-"
-
-
-
-
-
-
+            rows.append(
+                {
+                    "Item Id": v.get("id"),
+                    "Item Name": v.get("displayName"),
+                    "Item Type": v.get("itemAccessDetails", {}).get("type"),
+                    "Permissions": v.get("itemAccessDetails", {}).get("permissions"),
+                    "Additional Permissions": v.get("itemAccessDetails", {}).get(
+                        "additionalPermissions"
+                    ),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

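
The list_access_entities hunk reads nested fields defensively, chaining dict.get with a {} default so a record without an itemAccessDetails block yields None instead of raising. A small illustration with a hypothetical record:

# Hypothetical access-entity record; "itemAccessDetails" may be missing entirely.
v = {"id": "abc123", "displayName": "Sales Report"}

item_type = v.get("itemAccessDetails", {}).get("type")          # None, no KeyError
permissions = v.get("itemAccessDetails", {}).get("permissions")  # None as well
print(item_type, permissions)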
sempy_labs/admin/_workspaces.py
CHANGED
@@ -1,4 +1,4 @@
-from
+from .._helper_functions import (
     _base_api,
     _build_url,
     _encode_user,
@@ -8,7 +8,7 @@ from sempy_labs._helper_functions import (

 from uuid import UUID
 from typing import Optional
-from
+from ._basic_functions import (
     _resolve_workspace_name_and_id,
 )
 import sempy_labs._icons as icons

@@ -1,33 +1,33 @@
-from
-from
-from
-from
+from ._generate_shared_expression import generate_shared_expression
+from ._directlake_schema_compare import direct_lake_schema_compare
+from ._directlake_schema_sync import direct_lake_schema_sync
+from ._dl_helper import (
     check_fallback_reason,
     generate_direct_lake_semantic_model,
     get_direct_lake_source,
 )
-from
-from
-from
+from ._get_directlake_lakehouse import get_direct_lake_lakehouse
+from ._get_shared_expression import get_shared_expression
+from ._guardrails import (
     get_direct_lake_guardrails,
     get_sku_size,
     get_directlake_guardrails_for_sku,
 )
-from
+from ._list_directlake_model_calc_tables import (
     list_direct_lake_model_calc_tables,
 )
-from
+from ._show_unsupported_directlake_objects import (
     show_unsupported_direct_lake_objects,
 )
-from
+from ._update_directlake_model_lakehouse_connection import (
     update_direct_lake_model_lakehouse_connection,
     update_direct_lake_model_connection,
 )
-from
+from ._update_directlake_partition_entity import (
     update_direct_lake_partition_entity,
     add_table_to_direct_lake_semantic_model,
 )
-from
+from ._warm_cache import (
     warm_direct_lake_cache_isresident,
     warm_direct_lake_cache_perspective,
 )

@@ -1,14 +1,14 @@
 import sempy.fabric as fabric
 import pandas as pd
-from
+from .._helper_functions import (
     format_dax_object_name,
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     resolve_workspace_name,
 )
 from IPython.display import display
-from
-from
+from ..lakehouse import get_lakehouse_columns
+from ..directlake._dl_helper import get_direct_lake_source
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy._utils._log import log

@@ -1,9 +1,9 @@
 import sempy
 import pandas as pd
-from
-from
-from
-from
+from ..lakehouse import get_lakehouse_columns
+from ._dl_helper import get_direct_lake_source
+from ..tom import connect_semantic_model
+from .._helper_functions import (
     _convert_data_type,
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
@@ -83,6 +83,8 @@ def direct_lake_schema_sync(
     ) as tom:
         # Check if the columns in the semantic model exist in the lakehouse
         for c in tom.all_columns():
+            column_name = c.Name
+            table_name = c.Parent.Name
             partition_name = next(p.Name for p in c.Table.Partitions)
             p = c.Table.Partitions[partition_name]
             if p.SourceType == TOM.PartitionSourceType.Entity:
@@ -95,8 +97,8 @@ def direct_lake_schema_sync(
                 # Remove column from model if it doesn't exist in the lakehouse
                 if lc_filt.empty:
                     new_data = {
-                        "TableName":
-                        "ColumnName":
+                        "TableName": table_name,
+                        "ColumnName": column_name,
                         "SourceTableName": entity_name,
                         "SourceColumnName": source_column,
                         "Status": "Not in lakehouse",
@@ -107,7 +109,7 @@ def direct_lake_schema_sync(
                     if remove_from_model:
                         tom.remove_object(object=c)
                         print(
-                            f"{icons.green_dot} The '{
+                            f"{icons.green_dot} The '{table_name}'[{column_name}] column has been removed from the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
                         )

         # Check if the lakehouse columns exist in the semantic model

@@ -5,7 +5,7 @@ from typing import Optional, List, Union, Tuple
 from uuid import UUID
 import sempy_labs._icons as icons
 from sempy._utils._log import log
-from
+from .._helper_functions import (
     retry,
     _convert_data_type,
     resolve_dataset_name_and_id,

@@ -1,12 +1,12 @@
 import sempy.fabric as fabric
 import pandas as pd
-from
-from
+from .._list_functions import list_tables
+from ..tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 from uuid import UUID
-from
+from .._helper_functions import (
     resolve_dataset_name_and_id,
     resolve_workspace_name_and_id,
 )

@@ -1,12 +1,12 @@
-from
-from
+from ._generate_shared_expression import generate_shared_expression
+from .._helper_functions import (
     resolve_dataset_name_and_id,
     resolve_workspace_name_and_id,
     resolve_item_name_and_id,
     resolve_lakehouse_name_and_id,
 )
 from sempy._utils._log import log
-from
+from ..tom import connect_semantic_model
 from typing import Optional, List
 import sempy_labs._icons as icons
 from uuid import UUID

@@ -1,8 +1,8 @@
 import sempy
-from
-from
-from
-from
+from ..tom import connect_semantic_model
+from .._refresh_semantic_model import refresh_semantic_model
+from ._dl_helper import get_direct_lake_source
+from .._helper_functions import (
     _convert_data_type,
     resolve_dataset_name_and_id,
     resolve_workspace_name_and_id,

@@ -3,13 +3,13 @@ import pandas as pd
 from tqdm.auto import tqdm
 import numpy as np
 import time
-from
+from .._helper_functions import (
     format_dax_object_name,
     resolve_dataset_name_and_id,
     resolve_workspace_name_and_id,
 )
-from
-from
+from .._refresh_semantic_model import refresh_semantic_model
+from .._model_dependencies import get_measure_dependencies
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
sempy_labs/graph/__init__.py
CHANGED
@@ -1,4 +1,4 @@
-from
+from ._groups import (
     list_groups,
     list_group_owners,
     list_group_members,
@@ -7,13 +7,13 @@ from sempy_labs.graph._groups import (
     resolve_group_id,
     renew_group,
 )
-from
+from ._users import (
     resolve_user_id,
     get_user,
     list_users,
     send_mail,
 )
-from
+from ._teams import (
     list_teams,
 )

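
Because sempy_labs/graph/__init__.py still re-exports the same names after the change, the move to relative imports is invisible to callers; the public import path is unchanged. For example, assuming an environment with semantic-link-labs installed:

from sempy_labs.graph import list_groups, resolve_user_id, list_teams

# The re-exported names are the same function objects defined in the private modules.
import sempy_labs.graph._groups as _groups
assert list_groups is _groups.list_groups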