semantic-link-labs 0.11.1__py3-none-any.whl → 0.11.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. See the registry's advisory page for more details.

Files changed (131)
  1. {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.2.dist-info}/METADATA +5 -4
  2. semantic_link_labs-0.11.2.dist-info/RECORD +210 -0
  3. sempy_labs/__init__.py +56 -56
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_ai.py +1 -1
  6. sempy_labs/_capacities.py +2 -2
  7. sempy_labs/_capacity_migration.py +5 -5
  8. sempy_labs/_clear_cache.py +1 -1
  9. sempy_labs/_connections.py +2 -2
  10. sempy_labs/_dashboards.py +16 -16
  11. sempy_labs/_data_pipelines.py +1 -1
  12. sempy_labs/_dataflows.py +101 -26
  13. sempy_labs/_dax.py +3 -3
  14. sempy_labs/_dax_query_view.py +1 -1
  15. sempy_labs/_delta_analyzer.py +4 -4
  16. sempy_labs/_delta_analyzer_history.py +1 -1
  17. sempy_labs/_deployment_pipelines.py +1 -1
  18. sempy_labs/_environments.py +22 -21
  19. sempy_labs/_eventhouses.py +12 -11
  20. sempy_labs/_eventstreams.py +12 -11
  21. sempy_labs/_external_data_shares.py +23 -22
  22. sempy_labs/_gateways.py +47 -45
  23. sempy_labs/_generate_semantic_model.py +3 -3
  24. sempy_labs/_git.py +1 -1
  25. sempy_labs/_graphQL.py +12 -11
  26. sempy_labs/_job_scheduler.py +56 -54
  27. sempy_labs/_kql_databases.py +16 -17
  28. sempy_labs/_kql_querysets.py +12 -11
  29. sempy_labs/_kusto.py +2 -2
  30. sempy_labs/_list_functions.py +1 -1
  31. sempy_labs/_managed_private_endpoints.py +18 -15
  32. sempy_labs/_mirrored_databases.py +16 -15
  33. sempy_labs/_mirrored_warehouses.py +12 -11
  34. sempy_labs/_ml_experiments.py +11 -10
  35. sempy_labs/_ml_models.py +11 -10
  36. sempy_labs/_model_auto_build.py +3 -3
  37. sempy_labs/_model_bpa.py +5 -5
  38. sempy_labs/_model_bpa_bulk.py +3 -3
  39. sempy_labs/_model_dependencies.py +1 -1
  40. sempy_labs/_mounted_data_factories.py +12 -12
  41. sempy_labs/_notebooks.py +1 -1
  42. sempy_labs/_one_lake_integration.py +1 -1
  43. sempy_labs/_query_scale_out.py +1 -1
  44. sempy_labs/_refresh_semantic_model.py +1 -1
  45. sempy_labs/_semantic_models.py +30 -28
  46. sempy_labs/_spark.py +1 -1
  47. sempy_labs/_sql.py +1 -1
  48. sempy_labs/_sql_endpoints.py +12 -11
  49. sempy_labs/_sqldatabase.py +15 -15
  50. sempy_labs/_tags.py +11 -10
  51. sempy_labs/_translations.py +1 -1
  52. sempy_labs/_user_delegation_key.py +2 -2
  53. sempy_labs/_variable_libraries.py +13 -12
  54. sempy_labs/_vertipaq.py +3 -3
  55. sempy_labs/_vpax.py +1 -1
  56. sempy_labs/_warehouses.py +15 -14
  57. sempy_labs/_workloads.py +1 -1
  58. sempy_labs/_workspace_identity.py +1 -1
  59. sempy_labs/_workspaces.py +14 -13
  60. sempy_labs/admin/__init__.py +18 -18
  61. sempy_labs/admin/_activities.py +46 -46
  62. sempy_labs/admin/_apps.py +28 -26
  63. sempy_labs/admin/_artifacts.py +15 -15
  64. sempy_labs/admin/_basic_functions.py +1 -2
  65. sempy_labs/admin/_capacities.py +84 -82
  66. sempy_labs/admin/_dataflows.py +2 -2
  67. sempy_labs/admin/_datasets.py +50 -48
  68. sempy_labs/admin/_domains.py +25 -19
  69. sempy_labs/admin/_external_data_share.py +24 -22
  70. sempy_labs/admin/_git.py +17 -17
  71. sempy_labs/admin/_items.py +47 -45
  72. sempy_labs/admin/_reports.py +61 -58
  73. sempy_labs/admin/_scanner.py +2 -2
  74. sempy_labs/admin/_shared.py +18 -18
  75. sempy_labs/admin/_tags.py +2 -2
  76. sempy_labs/admin/_tenant.py +57 -51
  77. sempy_labs/admin/_users.py +16 -15
  78. sempy_labs/admin/_workspaces.py +2 -2
  79. sempy_labs/directlake/__init__.py +12 -12
  80. sempy_labs/directlake/_directlake_schema_compare.py +3 -3
  81. sempy_labs/directlake/_directlake_schema_sync.py +9 -7
  82. sempy_labs/directlake/_dl_helper.py +1 -1
  83. sempy_labs/directlake/_generate_shared_expression.py +1 -1
  84. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  85. sempy_labs/directlake/_guardrails.py +1 -1
  86. sempy_labs/directlake/_list_directlake_model_calc_tables.py +3 -3
  87. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -1
  88. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -3
  89. sempy_labs/directlake/_update_directlake_partition_entity.py +4 -4
  90. sempy_labs/directlake/_warm_cache.py +3 -3
  91. sempy_labs/graph/__init__.py +3 -3
  92. sempy_labs/graph/_groups.py +81 -78
  93. sempy_labs/graph/_teams.py +21 -21
  94. sempy_labs/graph/_users.py +111 -10
  95. sempy_labs/lakehouse/__init__.py +7 -7
  96. sempy_labs/lakehouse/_blobs.py +30 -30
  97. sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
  98. sempy_labs/lakehouse/_get_lakehouse_tables.py +29 -27
  99. sempy_labs/lakehouse/_helper.py +30 -2
  100. sempy_labs/lakehouse/_lakehouse.py +2 -2
  101. sempy_labs/lakehouse/_livy_sessions.py +47 -42
  102. sempy_labs/lakehouse/_shortcuts.py +22 -21
  103. sempy_labs/migration/__init__.py +8 -8
  104. sempy_labs/migration/_create_pqt_file.py +2 -2
  105. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -3
  106. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +3 -4
  107. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -2
  108. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +4 -4
  109. sempy_labs/migration/_migration_validation.py +1 -2
  110. sempy_labs/migration/_refresh_calc_tables.py +2 -2
  111. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +2 -2
  112. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +40 -40
  113. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +1 -1
  114. sempy_labs/report/__init__.py +10 -10
  115. sempy_labs/report/_download_report.py +2 -2
  116. sempy_labs/report/_export_report.py +2 -2
  117. sempy_labs/report/_generate_report.py +1 -1
  118. sempy_labs/report/_paginated.py +1 -1
  119. sempy_labs/report/_report_bpa.py +4 -3
  120. sempy_labs/report/_report_functions.py +3 -3
  121. sempy_labs/report/_report_list_functions.py +3 -3
  122. sempy_labs/report/_report_rebind.py +1 -1
  123. sempy_labs/report/_reportwrapper.py +247 -249
  124. sempy_labs/report/_save_report.py +3 -3
  125. sempy_labs/theme/_org_themes.py +19 -6
  126. sempy_labs/tom/__init__.py +1 -1
  127. sempy_labs/tom/_model.py +5 -5
  128. semantic_link_labs-0.11.1.dist-info/RECORD +0 -210
  129. {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.2.dist-info}/WHEEL +0 -0
  130. {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.2.dist-info}/licenses/LICENSE +0 -0
  131. {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.2.dist-info}/top_level.txt +0 -0
@@ -1,5 +1,5 @@
1
1
  import pandas as pd
2
- from sempy_labs._helper_functions import (
2
+ from .._helper_functions import (
3
3
  _base_api,
4
4
  _create_dataframe,
5
5
  )
@@ -57,26 +57,26 @@ def list_widely_shared_artifacts(
57
57
  uses_pagination=True,
58
58
  )
59
59
 
60
- dfs = []
60
+ rows = []
61
61
  for r in responses:
62
62
  for v in r.get("ArtifactAccessEntities", []):
63
63
  sharer = v.get("sharer", {})
64
- new_data = {
65
- "Artifact Id": v.get("artifactId"),
66
- "Artifact Name": v.get("displayName"),
67
- "Artifact Type": v.get("artifactType"),
68
- "Access Right": v.get("accessRight"),
69
- "Share Type": v.get("shareType"),
70
- "Sharer Name": sharer.get("displayName"),
71
- "Sharer Email Address": sharer.get("emailAddress"),
72
- "Sharer Identifier": sharer.get("identifier"),
73
- "Sharer Graph Id": sharer.get("graphId"),
74
- "Sharer Principal Type": sharer.get("principalType"),
75
- }
64
+ rows.append(
65
+ {
66
+ "Artifact Id": v.get("artifactId"),
67
+ "Artifact Name": v.get("displayName"),
68
+ "Artifact Type": v.get("artifactType"),
69
+ "Access Right": v.get("accessRight"),
70
+ "Share Type": v.get("shareType"),
71
+ "Sharer Name": sharer.get("displayName"),
72
+ "Sharer Email Address": sharer.get("emailAddress"),
73
+ "Sharer Identifier": sharer.get("identifier"),
74
+ "Sharer Graph Id": sharer.get("graphId"),
75
+ "Sharer Principal Type": sharer.get("principalType"),
76
+ }
77
+ )
76
78
 
77
- dfs.append(pd.DataFrame(new_data, index=[0]))
78
-
79
- if dfs:
80
- df = pd.concat(dfs, ignore_index=True)
79
+ if rows:
80
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
81
81
 
82
82
  return df
sempy_labs/admin/_tags.py CHANGED
@@ -1,9 +1,9 @@
1
- from sempy_labs._helper_functions import (
1
+ from .._helper_functions import (
2
2
  _base_api,
3
3
  _is_valid_uuid,
4
4
  )
5
5
  from uuid import UUID
6
- from sempy_labs._tags import list_tags
6
+ from .._tags import list_tags
7
7
  import sempy_labs._icons as icons
8
8
  from typing import List
9
9
  from sempy._utils._log import log
@@ -1,4 +1,4 @@
1
- from sempy_labs._helper_functions import (
1
+ from .._helper_functions import (
2
2
  _update_dataframe_datatypes,
3
3
  _base_api,
4
4
  _create_dataframe,
@@ -6,7 +6,7 @@ from sempy_labs._helper_functions import (
6
6
  from sempy._utils._log import log
7
7
  import pandas as pd
8
8
  from uuid import UUID
9
- from sempy_labs.admin._capacities import _resolve_capacity_name_and_id
9
+ from ._capacities import _resolve_capacity_name_and_id
10
10
  import sempy_labs._icons as icons
11
11
  from typing import Optional, List
12
12
 
@@ -38,20 +38,21 @@ def list_tenant_settings() -> pd.DataFrame:
38
38
 
39
39
  response = _base_api(request="/v1/admin/tenantsettings", client="fabric_sp")
40
40
 
41
- dfs = []
41
+ rows = []
42
42
  for i in response.json().get("value", []):
43
- new_data = {
44
- "Setting Name": i.get("settingName"),
45
- "Title": i.get("title"),
46
- "Enabled": i.get("enabled"),
47
- "Can Specify Security Groups": i.get("canSpecifySecurityGroups"),
48
- "Tenant Setting Group": i.get("tenantSettingGroup"),
49
- "Enabled Security Groups": [i.get("enabledSecurityGroups", [])],
50
- }
51
- dfs.append(pd.DataFrame(new_data, index=[0]))
43
+ rows.append(
44
+ {
45
+ "Setting Name": i.get("settingName"),
46
+ "Title": i.get("title"),
47
+ "Enabled": i.get("enabled"),
48
+ "Can Specify Security Groups": i.get("canSpecifySecurityGroups"),
49
+ "Tenant Setting Group": i.get("tenantSettingGroup"),
50
+ "Enabled Security Groups": [i.get("enabledSecurityGroups", [])],
51
+ }
52
+ )
52
53
 
53
- if dfs:
54
- df = pd.concat(dfs, ignore_index=True)
54
+ if rows:
55
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
55
56
  _update_dataframe_datatypes(dataframe=df, column_map=columns)
56
57
 
57
58
  return df
@@ -410,28 +411,31 @@ def list_workspaces_tenant_settings_overrides() -> pd.DataFrame:
410
411
  uses_pagination=True,
411
412
  )
412
413
 
414
+ rows = []
413
415
  for r in responses:
414
416
  for v in r.get("value", []):
415
417
  workspace_id = v.get("id")
416
418
  for setting in v.get("tenantSettings", []):
417
- new_data = {
418
- "Workspace Id": workspace_id,
419
- "Setting Name": setting.get("settingName"),
420
- "Title": setting.get("title"),
421
- "Enabled": setting.get("enabled"),
422
- "Can Specify Security Groups": setting.get(
423
- "canSpecifySecurityGroups"
424
- ),
425
- "Enabled Security Groups": [
426
- setting.get("enabledSecurityGroups", [])
427
- ],
428
- "Tenant Setting Group": setting.get("tenantSettingGroup"),
429
- "Delegated From": setting.get("delegatedFrom"),
430
- }
431
-
432
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
433
-
434
- _update_dataframe_datatypes(dataframe=df, column_map=columns)
419
+ rows.append(
420
+ {
421
+ "Workspace Id": workspace_id,
422
+ "Setting Name": setting.get("settingName"),
423
+ "Title": setting.get("title"),
424
+ "Enabled": setting.get("enabled"),
425
+ "Can Specify Security Groups": setting.get(
426
+ "canSpecifySecurityGroups"
427
+ ),
428
+ "Enabled Security Groups": [
429
+ setting.get("enabledSecurityGroups", [])
430
+ ],
431
+ "Tenant Setting Group": setting.get("tenantSettingGroup"),
432
+ "Delegated From": setting.get("delegatedFrom"),
433
+ }
434
+ )
435
+
436
+ if rows:
437
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
438
+ _update_dataframe_datatypes(dataframe=df, column_map=columns)
435
439
 
436
440
  return df
437
441
 
@@ -470,28 +474,30 @@ def list_domain_tenant_settings_overrides() -> pd.DataFrame:
470
474
  uses_pagination=True,
471
475
  )
472
476
 
477
+ rows = []
473
478
  for r in responses:
474
479
  for v in r.get("value", []):
475
480
  domain_id = v.get("id")
476
481
  for setting in v.get("tenantSettings", []):
477
- new_data = {
478
- "Domain Id": domain_id,
479
- "Setting Name": setting.get("settingName"),
480
- "Title": setting.get("title"),
481
- "Enabled": setting.get("enabled"),
482
- "Can Specify Security Groups": setting.get(
483
- "canSpecifySecurityGroups"
484
- ),
485
- "Enabled Security Groups": [
486
- setting.get("enabledSecurityGroups", [])
487
- ],
488
- "Tenant Setting Group": setting.get("tenantSettingGroup"),
489
- "Delegated To Workspace": setting.get("delegateToWorkspace"),
490
- "Delegated From": setting.get("delegatedFrom"),
491
- }
492
-
493
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
494
-
495
- _update_dataframe_datatypes(dataframe=df, column_map=columns)
482
+ rows.append(
483
+ {
484
+ "Domain Id": domain_id,
485
+ "Setting Name": setting.get("settingName"),
486
+ "Title": setting.get("title"),
487
+ "Enabled": setting.get("enabled"),
488
+ "Can Specify Security Groups": setting.get(
489
+ "canSpecifySecurityGroups"
490
+ ),
491
+ "Enabled Security Groups": [
492
+ setting.get("enabledSecurityGroups", [])
493
+ ],
494
+ "Tenant Setting Group": setting.get("tenantSettingGroup"),
495
+ "Delegated To Workspace": setting.get("delegateToWorkspace"),
496
+ "Delegated From": setting.get("delegatedFrom"),
497
+ }
498
+ )
499
+ if rows:
500
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
501
+ _update_dataframe_datatypes(dataframe=df, column_map=columns)
496
502
 
497
503
  return df
@@ -1,4 +1,4 @@
1
- from sempy_labs._helper_functions import (
1
+ from .._helper_functions import (
2
2
  _base_api,
3
3
  _create_dataframe,
4
4
  _update_dataframe_datatypes,
@@ -45,22 +45,23 @@ def list_access_entities(
45
45
  uses_pagination=True,
46
46
  )
47
47
 
48
- dfs = []
48
+ rows = []
49
49
  for r in responses:
50
50
  for v in r.get("accessEntities", []):
51
- new_data = {
52
- "Item Id": v.get("id"),
53
- "Item Name": v.get("displayName"),
54
- "Item Type": v.get("itemAccessDetails", {}).get("type"),
55
- "Permissions": v.get("itemAccessDetails", {}).get("permissions"),
56
- "Additional Permissions": v.get("itemAccessDetails", {}).get(
57
- "additionalPermissions"
58
- ),
59
- }
60
- dfs.append(pd.DataFrame(new_data, index=[0]))
61
-
62
- if dfs:
63
- df = pd.concat(dfs, ignore_index=True)
51
+ rows.append(
52
+ {
53
+ "Item Id": v.get("id"),
54
+ "Item Name": v.get("displayName"),
55
+ "Item Type": v.get("itemAccessDetails", {}).get("type"),
56
+ "Permissions": v.get("itemAccessDetails", {}).get("permissions"),
57
+ "Additional Permissions": v.get("itemAccessDetails", {}).get(
58
+ "additionalPermissions"
59
+ ),
60
+ }
61
+ )
62
+
63
+ if rows:
64
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
64
65
 
65
66
  return df
66
67
 
@@ -1,4 +1,4 @@
1
- from sempy_labs._helper_functions import (
1
+ from .._helper_functions import (
2
2
  _base_api,
3
3
  _build_url,
4
4
  _encode_user,
@@ -8,7 +8,7 @@ from sempy_labs._helper_functions import (
8
8
 
9
9
  from uuid import UUID
10
10
  from typing import Optional
11
- from sempy_labs.admin._basic_functions import (
11
+ from ._basic_functions import (
12
12
  _resolve_workspace_name_and_id,
13
13
  )
14
14
  import sempy_labs._icons as icons
@@ -1,33 +1,33 @@
1
- from sempy_labs.directlake._generate_shared_expression import generate_shared_expression
2
- from sempy_labs.directlake._directlake_schema_compare import direct_lake_schema_compare
3
- from sempy_labs.directlake._directlake_schema_sync import direct_lake_schema_sync
4
- from sempy_labs.directlake._dl_helper import (
1
+ from ._generate_shared_expression import generate_shared_expression
2
+ from ._directlake_schema_compare import direct_lake_schema_compare
3
+ from ._directlake_schema_sync import direct_lake_schema_sync
4
+ from ._dl_helper import (
5
5
  check_fallback_reason,
6
6
  generate_direct_lake_semantic_model,
7
7
  get_direct_lake_source,
8
8
  )
9
- from sempy_labs.directlake._get_directlake_lakehouse import get_direct_lake_lakehouse
10
- from sempy_labs.directlake._get_shared_expression import get_shared_expression
11
- from sempy_labs.directlake._guardrails import (
9
+ from ._get_directlake_lakehouse import get_direct_lake_lakehouse
10
+ from ._get_shared_expression import get_shared_expression
11
+ from ._guardrails import (
12
12
  get_direct_lake_guardrails,
13
13
  get_sku_size,
14
14
  get_directlake_guardrails_for_sku,
15
15
  )
16
- from sempy_labs.directlake._list_directlake_model_calc_tables import (
16
+ from ._list_directlake_model_calc_tables import (
17
17
  list_direct_lake_model_calc_tables,
18
18
  )
19
- from sempy_labs.directlake._show_unsupported_directlake_objects import (
19
+ from ._show_unsupported_directlake_objects import (
20
20
  show_unsupported_direct_lake_objects,
21
21
  )
22
- from sempy_labs.directlake._update_directlake_model_lakehouse_connection import (
22
+ from ._update_directlake_model_lakehouse_connection import (
23
23
  update_direct_lake_model_lakehouse_connection,
24
24
  update_direct_lake_model_connection,
25
25
  )
26
- from sempy_labs.directlake._update_directlake_partition_entity import (
26
+ from ._update_directlake_partition_entity import (
27
27
  update_direct_lake_partition_entity,
28
28
  add_table_to_direct_lake_semantic_model,
29
29
  )
30
- from sempy_labs.directlake._warm_cache import (
30
+ from ._warm_cache import (
31
31
  warm_direct_lake_cache_isresident,
32
32
  warm_direct_lake_cache_perspective,
33
33
  )
@@ -1,14 +1,14 @@
1
1
  import sempy.fabric as fabric
2
2
  import pandas as pd
3
- from sempy_labs._helper_functions import (
3
+ from .._helper_functions import (
4
4
  format_dax_object_name,
5
5
  resolve_workspace_name_and_id,
6
6
  resolve_dataset_name_and_id,
7
7
  resolve_workspace_name,
8
8
  )
9
9
  from IPython.display import display
10
- from sempy_labs.lakehouse import get_lakehouse_columns
11
- from sempy_labs.directlake._dl_helper import get_direct_lake_source
10
+ from ..lakehouse import get_lakehouse_columns
11
+ from ..directlake._dl_helper import get_direct_lake_source
12
12
  from typing import Optional
13
13
  import sempy_labs._icons as icons
14
14
  from sempy._utils._log import log
@@ -1,9 +1,9 @@
1
1
  import sempy
2
2
  import pandas as pd
3
- from sempy_labs.lakehouse import get_lakehouse_columns
4
- from sempy_labs.directlake._dl_helper import get_direct_lake_source
5
- from sempy_labs.tom import connect_semantic_model
6
- from sempy_labs._helper_functions import (
3
+ from ..lakehouse import get_lakehouse_columns
4
+ from ._dl_helper import get_direct_lake_source
5
+ from ..tom import connect_semantic_model
6
+ from .._helper_functions import (
7
7
  _convert_data_type,
8
8
  resolve_workspace_name_and_id,
9
9
  resolve_dataset_name_and_id,
@@ -83,6 +83,8 @@ def direct_lake_schema_sync(
83
83
  ) as tom:
84
84
  # Check if the columns in the semantic model exist in the lakehouse
85
85
  for c in tom.all_columns():
86
+ column_name = c.Name
87
+ table_name = c.Parent.Name
86
88
  partition_name = next(p.Name for p in c.Table.Partitions)
87
89
  p = c.Table.Partitions[partition_name]
88
90
  if p.SourceType == TOM.PartitionSourceType.Entity:
@@ -95,8 +97,8 @@ def direct_lake_schema_sync(
95
97
  # Remove column from model if it doesn't exist in the lakehouse
96
98
  if lc_filt.empty:
97
99
  new_data = {
98
- "TableName": c.Parent.Name,
99
- "ColumnName": c.Name,
100
+ "TableName": table_name,
101
+ "ColumnName": column_name,
100
102
  "SourceTableName": entity_name,
101
103
  "SourceColumnName": source_column,
102
104
  "Status": "Not in lakehouse",
@@ -107,7 +109,7 @@ def direct_lake_schema_sync(
107
109
  if remove_from_model:
108
110
  tom.remove_object(object=c)
109
111
  print(
110
- f"{icons.green_dot} The '{c.Parent.Name}'[{c.Name}] column has been removed from the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
112
+ f"{icons.green_dot} The '{table_name}'[{column_name}] column has been removed from the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
111
113
  )
112
114
 
113
115
  # Check if the lakehouse columns exist in the semantic model
@@ -5,7 +5,7 @@ from typing import Optional, List, Union, Tuple
5
5
  from uuid import UUID
6
6
  import sempy_labs._icons as icons
7
7
  from sempy._utils._log import log
8
- from sempy_labs._helper_functions import (
8
+ from .._helper_functions import (
9
9
  retry,
10
10
  _convert_data_type,
11
11
  resolve_dataset_name_and_id,
@@ -1,4 +1,4 @@
1
- from sempy_labs._helper_functions import (
1
+ from .._helper_functions import (
2
2
  resolve_workspace_name_and_id,
3
3
  _base_api,
4
4
  resolve_lakehouse_name_and_id,
@@ -1,5 +1,5 @@
1
1
  import sempy.fabric as fabric
2
- from sempy_labs._helper_functions import (
2
+ from .._helper_functions import (
3
3
  resolve_lakehouse_id,
4
4
  )
5
5
  from typing import Optional, Tuple
@@ -3,7 +3,7 @@ import pandas as pd
3
3
  from typing import Optional
4
4
  import sempy_labs._icons as icons
5
5
  from uuid import UUID
6
- from sempy_labs._helper_functions import (
6
+ from .._helper_functions import (
7
7
  resolve_workspace_name_and_id,
8
8
  )
9
9
  from sempy._utils._log import log
@@ -1,12 +1,12 @@
1
1
  import sempy.fabric as fabric
2
2
  import pandas as pd
3
- from sempy_labs._list_functions import list_tables
4
- from sempy_labs.tom import connect_semantic_model
3
+ from .._list_functions import list_tables
4
+ from ..tom import connect_semantic_model
5
5
  from typing import Optional
6
6
  from sempy._utils._log import log
7
7
  import sempy_labs._icons as icons
8
8
  from uuid import UUID
9
- from sempy_labs._helper_functions import (
9
+ from .._helper_functions import (
10
10
  resolve_dataset_name_and_id,
11
11
  resolve_workspace_name_and_id,
12
12
  )
@@ -1,6 +1,6 @@
1
1
  import sempy.fabric as fabric
2
2
  import pandas as pd
3
- from sempy_labs._helper_functions import (
3
+ from .._helper_functions import (
4
4
  format_dax_object_name,
5
5
  resolve_dataset_name_and_id,
6
6
  resolve_workspace_name_and_id,
@@ -1,12 +1,12 @@
1
- from sempy_labs.directlake._generate_shared_expression import generate_shared_expression
2
- from sempy_labs._helper_functions import (
1
+ from ._generate_shared_expression import generate_shared_expression
2
+ from .._helper_functions import (
3
3
  resolve_dataset_name_and_id,
4
4
  resolve_workspace_name_and_id,
5
5
  resolve_item_name_and_id,
6
6
  resolve_lakehouse_name_and_id,
7
7
  )
8
8
  from sempy._utils._log import log
9
- from sempy_labs.tom import connect_semantic_model
9
+ from ..tom import connect_semantic_model
10
10
  from typing import Optional, List
11
11
  import sempy_labs._icons as icons
12
12
  from uuid import UUID
@@ -1,8 +1,8 @@
1
1
  import sempy
2
- from sempy_labs.tom import connect_semantic_model
3
- from sempy_labs._refresh_semantic_model import refresh_semantic_model
4
- from sempy_labs.directlake._dl_helper import get_direct_lake_source
5
- from sempy_labs._helper_functions import (
2
+ from ..tom import connect_semantic_model
3
+ from .._refresh_semantic_model import refresh_semantic_model
4
+ from ._dl_helper import get_direct_lake_source
5
+ from .._helper_functions import (
6
6
  _convert_data_type,
7
7
  resolve_dataset_name_and_id,
8
8
  resolve_workspace_name_and_id,
@@ -3,13 +3,13 @@ import pandas as pd
3
3
  from tqdm.auto import tqdm
4
4
  import numpy as np
5
5
  import time
6
- from sempy_labs._helper_functions import (
6
+ from .._helper_functions import (
7
7
  format_dax_object_name,
8
8
  resolve_dataset_name_and_id,
9
9
  resolve_workspace_name_and_id,
10
10
  )
11
- from sempy_labs._refresh_semantic_model import refresh_semantic_model
12
- from sempy_labs._model_dependencies import get_measure_dependencies
11
+ from .._refresh_semantic_model import refresh_semantic_model
12
+ from .._model_dependencies import get_measure_dependencies
13
13
  from typing import Optional
14
14
  from sempy._utils._log import log
15
15
  import sempy_labs._icons as icons
@@ -1,4 +1,4 @@
1
- from sempy_labs.graph._groups import (
1
+ from ._groups import (
2
2
  list_groups,
3
3
  list_group_owners,
4
4
  list_group_members,
@@ -7,13 +7,13 @@ from sempy_labs.graph._groups import (
7
7
  resolve_group_id,
8
8
  renew_group,
9
9
  )
10
- from sempy_labs.graph._users import (
10
+ from ._users import (
11
11
  resolve_user_id,
12
12
  get_user,
13
13
  list_users,
14
14
  send_mail,
15
15
  )
16
- from sempy_labs.graph._teams import (
16
+ from ._teams import (
17
17
  list_teams,
18
18
  )
19
19