semantic-link-labs 0.8.9__py3-none-any.whl → 0.8.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs has been flagged for review.
Files changed (76)
  1. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +5 -2
  2. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +76 -75
  3. sempy_labs/__init__.py +14 -2
  4. sempy_labs/_authentication.py +31 -2
  5. sempy_labs/_clear_cache.py +39 -37
  6. sempy_labs/_connections.py +13 -13
  7. sempy_labs/_data_pipelines.py +20 -20
  8. sempy_labs/_dataflows.py +27 -28
  9. sempy_labs/_dax.py +41 -47
  10. sempy_labs/_environments.py +26 -23
  11. sempy_labs/_eventhouses.py +16 -15
  12. sempy_labs/_eventstreams.py +16 -15
  13. sempy_labs/_external_data_shares.py +18 -20
  14. sempy_labs/_gateways.py +57 -11
  15. sempy_labs/_generate_semantic_model.py +100 -71
  16. sempy_labs/_git.py +134 -67
  17. sempy_labs/_helper_functions.py +199 -145
  18. sempy_labs/_job_scheduler.py +92 -0
  19. sempy_labs/_kql_databases.py +16 -15
  20. sempy_labs/_kql_querysets.py +16 -15
  21. sempy_labs/_list_functions.py +281 -120
  22. sempy_labs/_managed_private_endpoints.py +19 -17
  23. sempy_labs/_mirrored_databases.py +51 -48
  24. sempy_labs/_mirrored_warehouses.py +5 -4
  25. sempy_labs/_ml_experiments.py +16 -15
  26. sempy_labs/_ml_models.py +15 -14
  27. sempy_labs/_model_bpa.py +27 -25
  28. sempy_labs/_model_bpa_bulk.py +3 -3
  29. sempy_labs/_model_dependencies.py +60 -28
  30. sempy_labs/_notebooks.py +73 -39
  31. sempy_labs/_one_lake_integration.py +23 -26
  32. sempy_labs/_query_scale_out.py +67 -64
  33. sempy_labs/_refresh_semantic_model.py +47 -42
  34. sempy_labs/_spark.py +33 -32
  35. sempy_labs/_sql.py +12 -9
  36. sempy_labs/_translations.py +10 -7
  37. sempy_labs/_vertipaq.py +34 -31
  38. sempy_labs/_warehouses.py +22 -21
  39. sempy_labs/_workspace_identity.py +11 -10
  40. sempy_labs/_workspaces.py +40 -33
  41. sempy_labs/admin/__init__.py +4 -0
  42. sempy_labs/admin/_basic_functions.py +44 -12
  43. sempy_labs/admin/_external_data_share.py +3 -3
  44. sempy_labs/admin/_items.py +4 -4
  45. sempy_labs/admin/_scanner.py +7 -5
  46. sempy_labs/directlake/_directlake_schema_compare.py +18 -14
  47. sempy_labs/directlake/_directlake_schema_sync.py +18 -12
  48. sempy_labs/directlake/_dl_helper.py +36 -32
  49. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  50. sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
  51. sempy_labs/directlake/_get_shared_expression.py +4 -3
  52. sempy_labs/directlake/_guardrails.py +12 -6
  53. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  54. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  55. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  56. sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
  57. sempy_labs/directlake/_warm_cache.py +87 -65
  58. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
  59. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
  60. sempy_labs/lakehouse/_lakehouse.py +17 -13
  61. sempy_labs/lakehouse/_shortcuts.py +42 -23
  62. sempy_labs/migration/_create_pqt_file.py +16 -11
  63. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  64. sempy_labs/report/_download_report.py +9 -8
  65. sempy_labs/report/_generate_report.py +40 -44
  66. sempy_labs/report/_paginated.py +9 -9
  67. sempy_labs/report/_report_bpa.py +13 -9
  68. sempy_labs/report/_report_functions.py +80 -91
  69. sempy_labs/report/_report_helper.py +8 -4
  70. sempy_labs/report/_report_list_functions.py +24 -13
  71. sempy_labs/report/_report_rebind.py +17 -16
  72. sempy_labs/report/_reportwrapper.py +41 -33
  73. sempy_labs/tom/_model.py +117 -38
  74. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
  75. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
  76. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
sempy_labs/directlake/_get_directlake_lakehouse.py

@@ -3,46 +3,49 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_id,
     resolve_lakehouse_name,
     get_direct_lake_sql_endpoint,
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
 )
 from typing import Optional, Tuple
 from uuid import UUID
 
 
 def get_direct_lake_lakehouse(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     lakehouse: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
 ) -> Tuple[str, UUID]:
     """
     Identifies the lakehouse used by a Direct Lake semantic model.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     lakehouse : str, default=None
         The Fabric lakehouse used by the Direct Lake semantic model.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-    lakehouse_workspace : str, default=None
-        The Fabric workspace used by the lakehouse.
+    lakehouse_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
     Returns
     -------
-    str, uuid.UUID
+    Tuple[str, uuid.UUID]
         The lakehouse name and lakehouse ID.
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     if lakehouse_workspace is None:
-        lakehouse_workspace = workspace
+        lakehouse_workspace = workspace_name
 
     if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
@@ -56,7 +59,7 @@ def get_direct_lake_lakehouse(
     #         f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
     #     )
 
-    sqlEndpointId = get_direct_lake_sql_endpoint(dataset, workspace)
+    sqlEndpointId = get_direct_lake_sql_endpoint(dataset_id, workspace_id)
 
     dfI = fabric.list_items(workspace=lakehouse_workspace, type="SQLEndpoint")
     dfI_filt = dfI[dfI["Id"] == sqlEndpointId]
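The change above widens the signature so the model and both workspace parameters accept either a name or a UUID. A minimal usage sketch, assuming a Fabric notebook with semantic-link-labs installed and the function re-exported from sempy_labs.directlake as in prior releases; the model name and workspace UUID below are illustrative, not from the diff:

    from uuid import UUID
    from sempy_labs.directlake import get_direct_lake_lakehouse

    # Address the workspace by ID and the model by name (hypothetical values).
    lakehouse_name, lakehouse_id = get_direct_lake_lakehouse(
        dataset="Sales Model",
        workspace=UUID("00000000-0000-0000-0000-000000000000"),
    )
    print(f"{lakehouse_name}: {lakehouse_id}")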
sempy_labs/directlake/_get_shared_expression.py

@@ -1,8 +1,9 @@
 from typing import Optional
+from uuid import UUID
 
 
 def get_shared_expression(
-    lakehouse: Optional[str] = None, workspace: Optional[str] = None
+    lakehouse: Optional[str] = None, workspace: Optional[str | UUID] = None
 ) -> str:
     """
     Dynamically generates the M expression used by a Direct Lake model for a given lakehouse.
@@ -12,8 +13,8 @@ def get_shared_expression(
     lakehouse : str, default=None
         The Fabric lakehouse used by the Direct Lake semantic model.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-    workspace : str, default=None
-        The Fabric workspace used by the lakehouse.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
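Since the workspace parameter now also takes an ID, the M expression for a lakehouse in another workspace can be generated by UUID. A sketch with illustrative values (hypothetical lakehouse name and workspace ID):

    from uuid import UUID
    from sempy_labs.directlake import get_shared_expression

    # Generate the Direct Lake M expression for a named lakehouse,
    # locating its workspace by ID rather than display name.
    expr = get_shared_expression(
        lakehouse="SalesLakehouse",
        workspace=UUID("00000000-0000-0000-0000-000000000000"),
    )
    print(expr)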
sempy_labs/directlake/_guardrails.py

@@ -2,6 +2,10 @@ import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 import sempy_labs._icons as icons
+from uuid import UUID
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+)
 
 
 def get_direct_lake_guardrails() -> pd.DataFrame:
@@ -28,14 +32,14 @@ def get_direct_lake_guardrails() -> pd.DataFrame:
     return df
 
 
-def get_sku_size(workspace: Optional[str] = None) -> str:
+def get_sku_size(workspace: Optional[str | UUID] = None) -> str:
     """
     Shows the SKU size for a workspace.
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -45,12 +49,14 @@ def get_sku_size(workspace: Optional[str] = None) -> str:
         The SKU size for a workspace.
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    dfW = fabric.list_workspaces(filter=f"name eq '{workspace}'")
+    dfW = fabric.list_workspaces(filter=f"id eq '{workspace_id}'")
 
     if len(dfW) == 0:
-        raise ValueError(f"{icons.red_dot} The '{workspace}' is not a valid workspace.")
+        raise ValueError(
+            f"{icons.red_dot} The '{workspace_name}' is not a valid workspace."
+        )
 
     capacity_id = dfW["Capacity Id"].iloc[0]
     dfC = fabric.list_capacities()
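Note the workspace lookup now filters on `id eq` instead of `name eq`, so the function resolves the workspace once and is no longer sensitive to display-name changes. A sketch, assuming get_sku_size is exported from sempy_labs.directlake; both values below are hypothetical and would resolve to the same workspace:

    from uuid import UUID
    from sempy_labs.directlake import get_sku_size

    # Names and IDs are now interchangeable for the workspace parameter.
    print(get_sku_size(workspace="Sales Workspace"))
    print(get_sku_size(workspace=UUID("00000000-0000-0000-0000-000000000000")))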
sempy_labs/directlake/_list_directlake_model_calc_tables.py

@@ -5,21 +5,26 @@ from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID
+from sempy_labs._helper_functions import (
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
+)
 
 
 @log
 def list_direct_lake_model_calc_tables(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows the calculated tables and their respective DAX expression for a Direct Lake model (which has been migrated from import/DirectQuery).
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -29,12 +34,13 @@ def list_direct_lake_model_calc_tables(
         A pandas dataframe showing the calculated tables which were migrated to Direct Lake and whose DAX expressions are stored as model annotations.
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     df = pd.DataFrame(columns=["Table Name", "Source Expression"])
 
     with connect_semantic_model(
-        dataset=dataset, readonly=True, workspace=workspace
+        dataset=dataset_id, readonly=True, workspace=workspace_id
    ) as tom:
 
         is_direct_lake = tom.is_direct_lake()
@@ -44,8 +50,8 @@ def list_direct_lake_model_calc_tables(
                 f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake mode."
             )
         else:
-            dfA = fabric.list_annotations(dataset=dataset, workspace=workspace)
-            dfT = list_tables(dataset, workspace)
+            dfA = fabric.list_annotations(dataset=dataset_id, workspace=workspace_id)
+            dfT = list_tables(dataset_id, workspace_id)
             dfA_filt = dfA[
                 (dfA["Object Type"] == "Model")
                 & (dfA["Annotation Name"].isin(dfT["Name"]))
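The returned dataframe keeps the two columns declared in the diff, "Table Name" and "Source Expression". A usage sketch with a hypothetical model name:

    from sempy_labs.directlake import list_direct_lake_model_calc_tables

    # The dataset can now be passed by UUID as well; a name works as before.
    df = list_direct_lake_model_calc_tables(dataset="Migrated Sales Model")
    print(df[["Table Name", "Source Expression"]].head())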
sempy_labs/directlake/_show_unsupported_directlake_objects.py

@@ -1,13 +1,18 @@
 import sempy.fabric as fabric
 import pandas as pd
-from sempy_labs._helper_functions import format_dax_object_name
+from sempy_labs._helper_functions import (
+    format_dax_object_name,
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
+)
 from typing import Optional, Tuple
 from sempy._utils._log import log
+from uuid import UUID
 
 
 @log
 def show_unsupported_direct_lake_objects(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:
     """
     Returns a list of a semantic model's objects which are not supported by Direct Lake based on
@@ -15,10 +20,10 @@ def show_unsupported_direct_lake_objects(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -30,11 +35,12 @@ def show_unsupported_direct_lake_objects(
 
     pd.options.mode.chained_assignment = None
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
-    dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
-    dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
-    dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
+    dfT = fabric.list_tables(dataset=dataset_id, workspace=workspace_id)
+    dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
+    dfR = fabric.list_relationships(dataset=dataset_id, workspace=workspace_id)
 
     # Calc tables
     dfT_filt = dfT[dfT["Type"] == "Calculated Table"]
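Per the Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame] return type, the function hands back three dataframes: unsupported tables, columns, and relationships. A sketch (model name is hypothetical):

    from sempy_labs.directlake import show_unsupported_direct_lake_objects

    # Unpack the three result dataframes.
    dfT, dfC, dfR = show_unsupported_direct_lake_objects(dataset="Sales Model")
    print(len(dfT), len(dfC), len(dfR))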
sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py

@@ -2,42 +2,46 @@ import sempy.fabric as fabric
 from sempy_labs.directlake._generate_shared_expression import generate_shared_expression
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
 )
 from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 import sempy_labs._icons as icons
+from uuid import UUID
 
 
 def update_direct_lake_model_lakehouse_connection(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     lakehouse: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
 ):
     """
     Remaps a Direct Lake semantic model's SQL Endpoint connection to a new lakehouse.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model exists.
+    dataset : str | UUID
+        Name or ID of the semantic model.
+    workspace : str | UUID, default=None
+        The Fabric workspace name or ID in which the semantic model exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     lakehouse : str, default=None
         The Fabric lakehouse used by the Direct Lake semantic model.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-    lakehouse_workspace : str, default=None
-        The Fabric workspace used by the lakehouse.
+    lakehouse_workspace : str | UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     if lakehouse_workspace is None:
-        lakehouse_workspace = workspace
+        lakehouse_workspace = workspace_name
 
     if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
@@ -50,7 +54,7 @@ def update_direct_lake_model_lakehouse_connection(
     if len(dfI_filt) == 0:
         raise ValueError(
             f"{icons.red_dot} The '{lakehouse}' lakehouse does not exist within the '{lakehouse_workspace}' workspace. "
-            f"Therefore it cannot be used to support the '{dataset}' semantic model within the '{workspace}' workspace."
+            f"Therefore it cannot be used to support the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
         )
 
     icons.sll_tags.append("UpdateDLConnection")
@@ -60,37 +64,37 @@ def update_direct_lake_model_lakehouse_connection(
     )
 
     with connect_semantic_model(
-        dataset=dataset, readonly=False, workspace=workspace
+        dataset=dataset_id, readonly=False, workspace=workspace_id
     ) as tom:
 
         if not tom.is_direct_lake():
             raise ValueError(
-                f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
+                f"{icons.red_dot} The '{dataset_name}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
             )
 
         tom.model.Expressions["DatabaseQuery"].Expression = shEx
 
         print(
-            f"{icons.green_dot} The expression in the '{dataset}' semantic model has been updated to point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace."
+            f"{icons.green_dot} The expression in the '{dataset_name}' semantic model has been updated to point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace."
         )
 
 
 def update_direct_lake_model_connection(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     source: Optional[str] = None,
-    source_type: Optional[str] = "Lakehouse",
-    source_workspace: Optional[str] = None,
+    source_type: str = "Lakehouse",
+    source_workspace: Optional[str | UUID] = None,
 ):
     """
     Remaps a Direct Lake semantic model's SQL Endpoint connection to a new lakehouse/warehouse.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model exists.
+    dataset : str | UUID
+        Name or ID of the semantic model.
+    workspace : str | UUID, default=None
+        The Fabric workspace name or ID in which the semantic model exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     source : str, default=None
@@ -98,14 +102,14 @@ def update_direct_lake_model_connection(
         Defaults to None which resolves to the lakehouse attached to the notebook.
     source_type : str, default="Lakehouse"
         The type of source for the Direct Lake semantic model. Valid options: "Lakehouse", "Warehouse".
-    source_workspace : str, default=None
-        The Fabric workspace used by the lakehouse/warehouse.
+    source_workspace : str | UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse/warehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    if workspace is None:
-        workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     source_type = source_type.capitalize()
 
@@ -115,7 +119,7 @@ def update_direct_lake_model_connection(
     )
 
     if source_workspace is None:
-        source_workspace = workspace
+        source_workspace = workspace_name
 
     if source is None:
         source_id = fabric.get_lakehouse_id()
@@ -135,16 +139,16 @@ def update_direct_lake_model_connection(
     )
 
     with connect_semantic_model(
-        dataset=dataset, readonly=False, workspace=workspace
+        dataset=dataset_id, readonly=False, workspace=workspace_id
     ) as tom:
 
         if not tom.is_direct_lake():
             raise ValueError(
-                f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
+                f"{icons.red_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
             )
 
         tom.model.Expressions["DatabaseQuery"].Expression = shEx
 
         print(
-            f"{icons.green_dot} The expression in the '{dataset}' semantic model has been updated to point to the '{source}' {source_type.lower()} in the '{source_workspace}' workspace."
+            f"{icons.green_dot} The expression in the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been updated to point to the '{source}' {source_type.lower()} in the '{source_workspace}' workspace."
         )
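Beyond the name/ID widening, note that `source_type` tightened from `Optional[str]` to a plain `str` with a "Lakehouse" default. A sketch of the warehouse path (all names are hypothetical):

    from sempy_labs.directlake import update_direct_lake_model_connection

    # Point the model's DatabaseQuery expression at a warehouse in the
    # same workspace; workspace defaults to the notebook's workspace.
    update_direct_lake_model_connection(
        dataset="Sales Model",
        source="SalesWarehouse",
        source_type="Warehouse",
    )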
sempy_labs/directlake/_update_directlake_partition_entity.py

@@ -3,36 +3,41 @@ import sempy.fabric as fabric
 from sempy_labs.tom import connect_semantic_model
 from sempy_labs._refresh_semantic_model import refresh_semantic_model
 from sempy_labs.directlake._dl_helper import get_direct_lake_source
-from sempy_labs._helper_functions import _convert_data_type
+from sempy_labs._helper_functions import (
+    _convert_data_type,
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
+)
 from typing import List, Optional, Union
 import sempy_labs._icons as icons
+from uuid import UUID
 
 
 def update_direct_lake_partition_entity(
-    dataset: str,
+    dataset: str | UUID,
     table_name: Union[str, List[str]],
     entity_name: Union[str, List[str]],
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Remaps a table (or tables) in a Direct Lake semantic model to a table in a lakehouse.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     table_name : str, List[str]
         Name of the table(s) in the semantic model.
     entity_name : str, List[str]
         Name of the lakehouse table to be mapped to the semantic model table.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model exists.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    if workspace is None:
-        workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     # Support both str & list types
     if isinstance(table_name, str):
@@ -48,12 +53,12 @@ def update_direct_lake_partition_entity(
     icons.sll_tags.append("UpdateDLPartition")
 
     with connect_semantic_model(
-        dataset=dataset, readonly=False, workspace=workspace
+        dataset=dataset_id, readonly=False, workspace=workspace_id
     ) as tom:
 
         if not tom.is_direct_lake():
             raise ValueError(
-                f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
+                f"{icons.red_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace is not in Direct Lake mode."
             )
 
         for tName in table_name:
@@ -68,42 +73,39 @@ def update_direct_lake_partition_entity(
 
             if part_name is None:
                 raise ValueError(
-                    f"{icons.red_dot} The '{tName}' table in the '{dataset}' semantic model has not been updated."
+                    f"{icons.red_dot} The '{tName}' table in the '{dataset_name}' semantic model has not been updated."
                 )
 
             tom.model.Tables[tName].Partitions[part_name].Source.EntityName = eName
             print(
-                f"{icons.green_dot} The '{tName}' table in the '{dataset}' semantic model has been updated to point to the '{eName}' table."
+                f"{icons.green_dot} The '{tName}' table in the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been updated to point to the '{eName}' table."
             )
 
 
 def add_table_to_direct_lake_semantic_model(
-    dataset: str,
+    dataset: str | UUID,
     table_name: str,
     lakehouse_table_name: str,
     refresh: bool = True,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Adds a table and all of its columns to a Direct Lake semantic model, based on a Fabric lakehouse table.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     table_name : str, List[str]
         Name of the table in the semantic model.
     lakehouse_table_name : str
         The name of the Fabric lakehouse table.
     refresh : bool, default=True
         Refreshes the table after it is added to the semantic model.
-    workspace : str, default=None
-        The name of the Fabric workspace in which the semantic model resides.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
     """
 
     sempy.fabric._client._utils._init_analysis_services()
@@ -111,10 +113,11 @@ def add_table_to_direct_lake_semantic_model(
     from sempy_labs.lakehouse._get_lakehouse_columns import get_lakehouse_columns
     from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     artifact_type, lakehouse_name, lakehouse_id, lakehouse_workspace_id = (
-        get_direct_lake_source(dataset=dataset, workspace=workspace)
+        get_direct_lake_source(dataset=dataset_id, workspace=workspace_id)
     )
 
     if artifact_type == "Warehouse":
@@ -125,7 +128,7 @@ def add_table_to_direct_lake_semantic_model(
     lakehouse_workspace = fabric.resolve_workspace_name(lakehouse_workspace_id)
 
     with connect_semantic_model(
-        dataset=dataset, readonly=False, workspace=workspace
+        dataset=dataset_id, readonly=False, workspace=workspace_id
     ) as tom:
 
         table_count = tom.model.Tables.Count
@@ -148,12 +151,12 @@ def add_table_to_direct_lake_semantic_model(
                 == TOM.PartitionSourceType.Entity
             )
             raise ValueError(
-                f"The '{lakehouse_table_name}' table already exists in the '{dataset}' semantic model within the '{workspace}' workspace as the '{t_name}' table."
+                f"The '{lakehouse_table_name}' table already exists in the '{dataset_name}' semantic model within the '{workspace_name}' workspace as the '{t_name}' table."
             )
 
         if any(t.Name == table_name for t in tom.model.Tables):
             raise ValueError(
-                f"The '{table_name}' table already exists in the '{dataset}' semantic model within the '{workspace}' workspace."
+                f"The '{table_name}' table already exists in the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
             )
 
         dfL = get_lakehouse_tables(
@@ -173,13 +176,13 @@ def add_table_to_direct_lake_semantic_model(
 
         tom.add_table(name=table_name)
         print(
-            f"{icons.green_dot} The '{table_name}' table has been added to the '{dataset}' semantic model within the '{workspace}' workspace."
+            f"{icons.green_dot} The '{table_name}' table has been added to the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
        )
         tom.add_entity_partition(
             table_name=table_name, entity_name=lakehouse_table_name
         )
         print(
-            f"{icons.green_dot} The '{lakehouse_table_name}' partition has been added to the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace."
+            f"{icons.green_dot} The '{lakehouse_table_name}' partition has been added to the '{table_name}' table in the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
         )
 
         for i, r in dfLC_filt.iterrows():
@@ -193,10 +196,10 @@ def add_table_to_direct_lake_semantic_model(
                 data_type=dt,
             )
             print(
-                f"{icons.green_dot} The '{lakeCName}' column has been added to the '{table_name}' table as a '{dt}' data type in the '{dataset}' semantic model within the '{workspace}' workspace."
+                f"{icons.green_dot} The '{lakeCName}' column has been added to the '{table_name}' table as a '{dt}' data type in the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
             )
 
         if refresh:
             refresh_semantic_model(
-                dataset=dataset, tables=table_name, workspace=workspace
+                dataset=dataset_id, tables=table_name, workspace=workspace_id
             )
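As the diff shows, `update_direct_lake_partition_entity` accepts both `str` and `List[str]` for the table and entity names, so several remaps can run in one call. A closing sketch (all names are hypothetical):

    from sempy_labs.directlake import update_direct_lake_partition_entity

    # Remap two model tables to differently named lakehouse tables in one call;
    # table_name[i] is paired with entity_name[i].
    update_direct_lake_partition_entity(
        dataset="Sales Model",
        table_name=["DimDate", "FactSales"],
        entity_name=["dim_date", "fact_sales"],
    )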