semantic-link-labs 0.8.10-py3-none-any.whl → 0.9.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (81)
  1. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/METADATA +6 -5
  2. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/RECORD +81 -80
  3. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +34 -3
  5. sempy_labs/_authentication.py +80 -4
  6. sempy_labs/_capacities.py +770 -200
  7. sempy_labs/_capacity_migration.py +7 -37
  8. sempy_labs/_clear_cache.py +37 -35
  9. sempy_labs/_connections.py +13 -13
  10. sempy_labs/_data_pipelines.py +20 -20
  11. sempy_labs/_dataflows.py +27 -28
  12. sempy_labs/_dax.py +41 -47
  13. sempy_labs/_deployment_pipelines.py +1 -1
  14. sempy_labs/_environments.py +26 -23
  15. sempy_labs/_eventhouses.py +16 -15
  16. sempy_labs/_eventstreams.py +16 -15
  17. sempy_labs/_external_data_shares.py +18 -20
  18. sempy_labs/_gateways.py +16 -14
  19. sempy_labs/_generate_semantic_model.py +107 -62
  20. sempy_labs/_git.py +105 -43
  21. sempy_labs/_helper_functions.py +251 -194
  22. sempy_labs/_job_scheduler.py +227 -0
  23. sempy_labs/_kql_databases.py +16 -15
  24. sempy_labs/_kql_querysets.py +16 -15
  25. sempy_labs/_list_functions.py +150 -126
  26. sempy_labs/_managed_private_endpoints.py +19 -17
  27. sempy_labs/_mirrored_databases.py +51 -48
  28. sempy_labs/_mirrored_warehouses.py +5 -4
  29. sempy_labs/_ml_experiments.py +16 -15
  30. sempy_labs/_ml_models.py +15 -14
  31. sempy_labs/_model_bpa.py +210 -207
  32. sempy_labs/_model_bpa_bulk.py +2 -2
  33. sempy_labs/_model_bpa_rules.py +3 -3
  34. sempy_labs/_model_dependencies.py +55 -29
  35. sempy_labs/_notebooks.py +29 -25
  36. sempy_labs/_one_lake_integration.py +23 -26
  37. sempy_labs/_query_scale_out.py +75 -64
  38. sempy_labs/_refresh_semantic_model.py +25 -26
  39. sempy_labs/_spark.py +33 -32
  40. sempy_labs/_sql.py +19 -12
  41. sempy_labs/_translations.py +10 -7
  42. sempy_labs/_vertipaq.py +38 -33
  43. sempy_labs/_warehouses.py +26 -25
  44. sempy_labs/_workspace_identity.py +11 -10
  45. sempy_labs/_workspaces.py +40 -33
  46. sempy_labs/admin/_basic_functions.py +166 -115
  47. sempy_labs/admin/_domains.py +7 -2
  48. sempy_labs/admin/_external_data_share.py +3 -3
  49. sempy_labs/admin/_git.py +4 -1
  50. sempy_labs/admin/_items.py +11 -6
  51. sempy_labs/admin/_scanner.py +10 -5
  52. sempy_labs/directlake/_directlake_schema_compare.py +25 -16
  53. sempy_labs/directlake/_directlake_schema_sync.py +24 -12
  54. sempy_labs/directlake/_dl_helper.py +74 -55
  55. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  56. sempy_labs/directlake/_get_directlake_lakehouse.py +32 -36
  57. sempy_labs/directlake/_get_shared_expression.py +4 -3
  58. sempy_labs/directlake/_guardrails.py +12 -6
  59. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  60. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  61. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  62. sempy_labs/directlake/_update_directlake_partition_entity.py +39 -31
  63. sempy_labs/directlake/_warm_cache.py +87 -65
  64. sempy_labs/lakehouse/_get_lakehouse_columns.py +23 -26
  65. sempy_labs/lakehouse/_get_lakehouse_tables.py +27 -38
  66. sempy_labs/lakehouse/_lakehouse.py +7 -20
  67. sempy_labs/lakehouse/_shortcuts.py +42 -23
  68. sempy_labs/migration/_create_pqt_file.py +16 -11
  69. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  70. sempy_labs/report/_download_report.py +9 -8
  71. sempy_labs/report/_generate_report.py +85 -44
  72. sempy_labs/report/_paginated.py +9 -9
  73. sempy_labs/report/_report_bpa.py +15 -11
  74. sempy_labs/report/_report_functions.py +80 -91
  75. sempy_labs/report/_report_helper.py +8 -4
  76. sempy_labs/report/_report_list_functions.py +24 -13
  77. sempy_labs/report/_report_rebind.py +17 -16
  78. sempy_labs/report/_reportwrapper.py +41 -33
  79. sempy_labs/tom/_model.py +139 -21
  80. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/LICENSE +0 -0
  81. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/top_level.txt +0 -0

sempy_labs/directlake/_get_directlake_lakehouse.py
@@ -3,65 +3,61 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_id,
     resolve_lakehouse_name,
     get_direct_lake_sql_endpoint,
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
 )
 from typing import Optional, Tuple
 from uuid import UUID
+import sempy_labs._icons as icons


 def get_direct_lake_lakehouse(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     lakehouse: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
 ) -> Tuple[str, UUID]:
     """
     Identifies the lakehouse used by a Direct Lake semantic model.

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     lakehouse : str, default=None
         The Fabric lakehouse used by the Direct Lake semantic model.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-    lakehouse_workspace : str, default=None
-        The Fabric workspace used by the lakehouse.
+    lakehouse_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

     Returns
     -------
-    str, uuid.UUID
+    Tuple[str, uuid.UUID]
         The lakehouse name and lakehouse ID.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
-
-    if lakehouse_workspace is None:
-        lakehouse_workspace = workspace
-
-    if lakehouse is None:
-        lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
-
-    # dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
-    # dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
-
-    # if len(dfP_filt) == 0:
-    #     raise ValueError(
-    #         f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
-    #     )
-
-    sqlEndpointId = get_direct_lake_sql_endpoint(dataset, workspace)
-
-    dfI = fabric.list_items(workspace=lakehouse_workspace, type="SQLEndpoint")
-    dfI_filt = dfI[dfI["Id"] == sqlEndpointId]
-    lakehouseName = dfI_filt["Display Name"].iloc[0]
-
-    lakehouseId = resolve_lakehouse_id(lakehouseName, lakehouse_workspace)
-
-    return lakehouseName, lakehouseId
+    from sempy_labs.directlake._dl_helper import get_direct_lake_source
+
+    artifact_type, artifact_name, artifact_id, workspace_id = get_direct_lake_source(
+        dataset=dataset, workspace=workspace
+    )
+
+    if artifact_type in ["Lakehouse", "Warehouse"]:
+        return artifact_name, artifact_id
+    else:
+        dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
+        dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
+        if dfP_filt.empty:
+            raise ValueError(
+                f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
+            )
+        lakehouse_id = resolve_lakehouse_id(
+            lakehouse=lakehouse, workspace=lakehouse_workspace
+        )
+        return lakehouse, lakehouse_id
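
The rewritten function delegates source discovery to get_direct_lake_source and accepts either names or UUIDs. A minimal usage sketch, assuming a Fabric notebook session and that the function is re-exported from sempy_labs.directlake as in prior releases (both UUIDs are placeholders):

    from uuid import UUID
    from sempy_labs.directlake import get_direct_lake_lakehouse

    # 0.8.x-style call by name still works.
    lakehouse_name, lakehouse_id = get_direct_lake_lakehouse(dataset="Sales Model")

    # New in 0.9.0: dataset and workspace can be passed as UUIDs instead.
    lakehouse_name, lakehouse_id = get_direct_lake_lakehouse(
        dataset=UUID("00000000-0000-0000-0000-000000000000"),
        workspace=UUID("11111111-1111-1111-1111-111111111111"),
    )

Returning the same (name, id) tuple keeps 0.8.x callers working while sparing ID-based callers a name lookup.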

sempy_labs/directlake/_get_shared_expression.py
@@ -1,8 +1,9 @@
 from typing import Optional
+from uuid import UUID


 def get_shared_expression(
-    lakehouse: Optional[str] = None, workspace: Optional[str] = None
+    lakehouse: Optional[str] = None, workspace: Optional[str | UUID] = None
 ) -> str:
     """
     Dynamically generates the M expression used by a Direct Lake model for a given lakehouse.
@@ -12,8 +13,8 @@ def get_shared_expression(
     lakehouse : str, default=None
         The Fabric lakehouse used by the Direct Lake semantic model.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-    workspace : str, default=None
-        The Fabric workspace used by the lakehouse.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

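
A short sketch of the widened parameter: the workspace may now be addressed by ID (placeholder UUID string below), with the re-export from sempy_labs.directlake assumed:

    from sempy_labs.directlake import get_shared_expression

    # M expression for the lakehouse attached to the notebook, workspace by ID.
    expression = get_shared_expression(workspace="11111111-1111-1111-1111-111111111111")
    print(expression)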

sempy_labs/directlake/_guardrails.py
@@ -2,6 +2,10 @@ import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 import sempy_labs._icons as icons
+from uuid import UUID
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+)


 def get_direct_lake_guardrails() -> pd.DataFrame:
@@ -28,14 +32,14 @@ def get_direct_lake_guardrails() -> pd.DataFrame:
     return df


-def get_sku_size(workspace: Optional[str] = None) -> str:
+def get_sku_size(workspace: Optional[str | UUID] = None) -> str:
     """
     Shows the SKU size for a workspace.

     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -45,12 +49,14 @@ def get_sku_size(workspace: Optional[str] = None) -> str:
         The SKU size for a workspace.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    dfW = fabric.list_workspaces(filter=f"name eq '{workspace}'")
+    dfW = fabric.list_workspaces(filter=f"id eq '{workspace_id}'")

     if len(dfW) == 0:
-        raise ValueError(f"{icons.red_dot} The '{workspace}' is not a valid workspace.")
+        raise ValueError(
+            f"{icons.red_dot} The '{workspace_name}' is not a valid workspace."
+        )

     capacity_id = dfW["Capacity Id"].iloc[0]
     dfC = fabric.list_capacities()
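
Because the lookup now filters on the immutable workspace ID rather than the display name, renaming a workspace no longer breaks resolution. A sketch pairing the two functions in this module (re-export path from sempy_labs.directlake assumed):

    from sempy_labs.directlake import get_direct_lake_guardrails, get_sku_size

    sku = get_sku_size()                       # SKU of the current workspace
    guardrails = get_direct_lake_guardrails()  # guardrail limits per SKU tier
    print(sku)
    print(guardrails)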

sempy_labs/directlake/_list_directlake_model_calc_tables.py
@@ -5,21 +5,26 @@ from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID
+from sempy_labs._helper_functions import (
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
+)


 @log
 def list_direct_lake_model_calc_tables(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows the calculated tables and their respective DAX expression for a Direct Lake model (which has been migrated from import/DirectQuery).

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -29,12 +34,13 @@ def list_direct_lake_model_calc_tables(
         A pandas dataframe showing the calculated tables which were migrated to Direct Lake and whose DAX expressions are stored as model annotations.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     df = pd.DataFrame(columns=["Table Name", "Source Expression"])

     with connect_semantic_model(
-        dataset=dataset, readonly=True, workspace=workspace
+        dataset=dataset_id, readonly=True, workspace=workspace_id
     ) as tom:

         is_direct_lake = tom.is_direct_lake()
@@ -44,8 +50,8 @@ def list_direct_lake_model_calc_tables(
                 f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake mode."
             )
         else:
-            dfA = fabric.list_annotations(dataset=dataset, workspace=workspace)
-            dfT = list_tables(dataset, workspace)
+            dfA = fabric.list_annotations(dataset=dataset_id, workspace=workspace_id)
+            dfT = list_tables(dataset_id, workspace_id)
             dfA_filt = dfA[
                 (dfA["Object Type"] == "Model")
                 & (dfA["Annotation Name"].isin(dfT["Name"]))
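
The resolve_* helpers return a (name, id) pair whichever form the caller passes, so downstream calls can pin to the stable ID while error messages keep the friendly name. A usage sketch (model and workspace names are placeholders):

    from sempy_labs.directlake import list_direct_lake_model_calc_tables

    # Either argument may be a name or, as of 0.9.0, a UUID.
    df = list_direct_lake_model_calc_tables(
        dataset="Migrated Sales Model", workspace="Analytics"
    )
    print(df[["Table Name", "Source Expression"]])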

sempy_labs/directlake/_show_unsupported_directlake_objects.py
@@ -1,13 +1,18 @@
 import sempy.fabric as fabric
 import pandas as pd
-from sempy_labs._helper_functions import format_dax_object_name
+from sempy_labs._helper_functions import (
+    format_dax_object_name,
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
+)
 from typing import Optional, Tuple
 from sempy._utils._log import log
+from uuid import UUID


 @log
 def show_unsupported_direct_lake_objects(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:
     """
     Returns a list of a semantic model's objects which are not supported by Direct Lake based on
@@ -15,10 +20,10 @@ def show_unsupported_direct_lake_objects(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -30,11 +35,12 @@ def show_unsupported_direct_lake_objects(

     pd.options.mode.chained_assignment = None

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

-    dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
-    dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
-    dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
+    dfT = fabric.list_tables(dataset=dataset_id, workspace=workspace_id)
+    dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
+    dfR = fabric.list_relationships(dataset=dataset_id, workspace=workspace_id)

     # Calc tables
     dfT_filt = dfT[dfT["Type"] == "Calculated Table"]
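
The return shape is unchanged: three dataframes covering unsupported tables, columns, and relationships. A sketch with a placeholder model name (re-export path assumed):

    from sempy_labs.directlake import show_unsupported_direct_lake_objects

    tables_df, columns_df, relationships_df = show_unsupported_direct_lake_objects(
        dataset="Sales Model"
    )
    print(len(tables_df), len(columns_df), len(relationships_df))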

sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py
@@ -2,42 +2,46 @@ import sempy.fabric as fabric
 from sempy_labs.directlake._generate_shared_expression import generate_shared_expression
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
 )
 from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 import sempy_labs._icons as icons
+from uuid import UUID


 def update_direct_lake_model_lakehouse_connection(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     lakehouse: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
 ):
     """
     Remaps a Direct Lake semantic model's SQL Endpoint connection to a new lakehouse.

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model exists.
+    dataset : str | UUID
+        Name or ID of the semantic model.
+    workspace : str | UUID, default=None
+        The Fabric workspace name or ID in which the semantic model exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     lakehouse : str, default=None
         The Fabric lakehouse used by the Direct Lake semantic model.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-    lakehouse_workspace : str, default=None
-        The Fabric workspace used by the lakehouse.
+    lakehouse_workspace : str | UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     if lakehouse_workspace is None:
-        lakehouse_workspace = workspace
+        lakehouse_workspace = workspace_name

     if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
@@ -50,7 +54,7 @@ def update_direct_lake_model_lakehouse_connection(
     if len(dfI_filt) == 0:
         raise ValueError(
             f"{icons.red_dot} The '{lakehouse}' lakehouse does not exist within the '{lakehouse_workspace}' workspace. "
-            f"Therefore it cannot be used to support the '{dataset}' semantic model within the '{workspace}' workspace."
+            f"Therefore it cannot be used to support the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
         )

     icons.sll_tags.append("UpdateDLConnection")
@@ -60,37 +64,37 @@ def update_direct_lake_model_lakehouse_connection(
     )

     with connect_semantic_model(
-        dataset=dataset, readonly=False, workspace=workspace
+        dataset=dataset_id, readonly=False, workspace=workspace_id
     ) as tom:

         if not tom.is_direct_lake():
             raise ValueError(
-                f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
+                f"{icons.red_dot} The '{dataset_name}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
             )

         tom.model.Expressions["DatabaseQuery"].Expression = shEx

         print(
-            f"{icons.green_dot} The expression in the '{dataset}' semantic model has been updated to point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace."
+            f"{icons.green_dot} The expression in the '{dataset_name}' semantic model has been updated to point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace."
         )


 def update_direct_lake_model_connection(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     source: Optional[str] = None,
-    source_type: Optional[str] = "Lakehouse",
-    source_workspace: Optional[str] = None,
+    source_type: str = "Lakehouse",
+    source_workspace: Optional[str | UUID] = None,
 ):
     """
     Remaps a Direct Lake semantic model's SQL Endpoint connection to a new lakehouse/warehouse.

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model exists.
+    dataset : str | UUID
+        Name or ID of the semantic model.
+    workspace : str | UUID, default=None
+        The Fabric workspace name or ID in which the semantic model exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     source : str, default=None
@@ -98,14 +102,14 @@ def update_direct_lake_model_connection(
         Defaults to None which resolves to the lakehouse attached to the notebook.
     source_type : str, default="Lakehouse"
         The type of source for the Direct Lake semantic model. Valid options: "Lakehouse", "Warehouse".
-    source_workspace : str, default=None
-        The Fabric workspace used by the lakehouse/warehouse.
+    source_workspace : str | UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse/warehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    if workspace is None:
-        workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     source_type = source_type.capitalize()

@@ -115,7 +119,7 @@ def update_direct_lake_model_connection(
     )

     if source_workspace is None:
-        source_workspace = workspace
+        source_workspace = workspace_name

     if source is None:
         source_id = fabric.get_lakehouse_id()
@@ -135,16 +139,16 @@ def update_direct_lake_model_connection(
     )

     with connect_semantic_model(
-        dataset=dataset, readonly=False, workspace=workspace
+        dataset=dataset_id, readonly=False, workspace=workspace_id
     ) as tom:

         if not tom.is_direct_lake():
             raise ValueError(
-                f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
+                f"{icons.red_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
             )

         tom.model.Expressions["DatabaseQuery"].Expression = shEx

         print(
-            f"{icons.green_dot} The expression in the '{dataset}' semantic model has been updated to point to the '{source}' {source_type.lower()} in the '{source_workspace}' workspace."
+            f"{icons.green_dot} The expression in the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been updated to point to the '{source}' {source_type.lower()} in the '{source_workspace}' workspace."
         )
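
A sketch of remapping a model to a warehouse source with the updated signature; all item and workspace names are placeholders, and the re-export from sempy_labs.directlake is assumed:

    from sempy_labs.directlake import update_direct_lake_model_connection

    update_direct_lake_model_connection(
        dataset="Sales Model",
        workspace="Analytics",
        source="SalesWarehouse",
        source_type="Warehouse",  # "Lakehouse" (default) or "Warehouse"
        source_workspace="Data Engineering",
    )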

sempy_labs/directlake/_update_directlake_partition_entity.py
@@ -3,36 +3,41 @@ import sempy.fabric as fabric
 from sempy_labs.tom import connect_semantic_model
 from sempy_labs._refresh_semantic_model import refresh_semantic_model
 from sempy_labs.directlake._dl_helper import get_direct_lake_source
-from sempy_labs._helper_functions import _convert_data_type
+from sempy_labs._helper_functions import (
+    _convert_data_type,
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
+)
 from typing import List, Optional, Union
 import sempy_labs._icons as icons
+from uuid import UUID


 def update_direct_lake_partition_entity(
-    dataset: str,
+    dataset: str | UUID,
     table_name: Union[str, List[str]],
     entity_name: Union[str, List[str]],
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Remaps a table (or tables) in a Direct Lake semantic model to a table in a lakehouse.

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     table_name : str, List[str]
         Name of the table(s) in the semantic model.
     entity_name : str, List[str]
         Name of the lakehouse table to be mapped to the semantic model table.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model exists.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    if workspace is None:
-        workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     # Support both str & list types
     if isinstance(table_name, str):
@@ -48,12 +53,12 @@ def update_direct_lake_partition_entity(
     icons.sll_tags.append("UpdateDLPartition")

     with connect_semantic_model(
-        dataset=dataset, readonly=False, workspace=workspace
+        dataset=dataset_id, readonly=False, workspace=workspace_id
     ) as tom:

         if not tom.is_direct_lake():
             raise ValueError(
-                f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
+                f"{icons.red_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace is not in Direct Lake mode."
             )

         for tName in table_name:
@@ -68,42 +73,39 @@ def update_direct_lake_partition_entity(

             if part_name is None:
                 raise ValueError(
-                    f"{icons.red_dot} The '{tName}' table in the '{dataset}' semantic model has not been updated."
+                    f"{icons.red_dot} The '{tName}' table in the '{dataset_name}' semantic model has not been updated."
                 )

             tom.model.Tables[tName].Partitions[part_name].Source.EntityName = eName
             print(
-                f"{icons.green_dot} The '{tName}' table in the '{dataset}' semantic model has been updated to point to the '{eName}' table."
+                f"{icons.green_dot} The '{tName}' table in the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been updated to point to the '{eName}' table."
             )


 def add_table_to_direct_lake_semantic_model(
-    dataset: str,
+    dataset: str | UUID,
     table_name: str,
     lakehouse_table_name: str,
     refresh: bool = True,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Adds a table and all of its columns to a Direct Lake semantic model, based on a Fabric lakehouse table.

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     table_name : str, List[str]
         Name of the table in the semantic model.
     lakehouse_table_name : str
         The name of the Fabric lakehouse table.
     refresh : bool, default=True
         Refreshes the table after it is added to the semantic model.
-    workspace : str, default=None
-        The name of the Fabric workspace in which the semantic model resides.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
     """

     sempy.fabric._client._utils._init_analysis_services()
@@ -111,10 +113,11 @@ def add_table_to_direct_lake_semantic_model(
     from sempy_labs.lakehouse._get_lakehouse_columns import get_lakehouse_columns
     from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     artifact_type, lakehouse_name, lakehouse_id, lakehouse_workspace_id = (
-        get_direct_lake_source(dataset=dataset, workspace=workspace)
+        get_direct_lake_source(dataset=dataset_id, workspace=workspace_id)
     )

     if artifact_type == "Warehouse":
@@ -122,10 +125,15 @@ def add_table_to_direct_lake_semantic_model(
             f"{icons.red_dot} This function is only valid for Direct Lake semantic models which source from Fabric lakehouses (not warehouses)."
         )

+    if artifact_type is None:
+        raise ValueError(
+            f"{icons.red_dot} This function only supports Direct Lake semantic models where the source lakehouse resides in the same workpace as the semantic model."
+        )
+
     lakehouse_workspace = fabric.resolve_workspace_name(lakehouse_workspace_id)

     with connect_semantic_model(
-        dataset=dataset, readonly=False, workspace=workspace
+        dataset=dataset_id, readonly=False, workspace=workspace_id
     ) as tom:

         table_count = tom.model.Tables.Count
@@ -148,12 +156,12 @@ def add_table_to_direct_lake_semantic_model(
                 == TOM.PartitionSourceType.Entity
             )
             raise ValueError(
-                f"The '{lakehouse_table_name}' table already exists in the '{dataset}' semantic model within the '{workspace}' workspace as the '{t_name}' table."
+                f"The '{lakehouse_table_name}' table already exists in the '{dataset_name}' semantic model within the '{workspace_name}' workspace as the '{t_name}' table."
             )

         if any(t.Name == table_name for t in tom.model.Tables):
             raise ValueError(
-                f"The '{table_name}' table already exists in the '{dataset}' semantic model within the '{workspace}' workspace."
+                f"The '{table_name}' table already exists in the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
             )

         dfL = get_lakehouse_tables(
@@ -173,13 +181,13 @@ def add_table_to_direct_lake_semantic_model(

         tom.add_table(name=table_name)
         print(
-            f"{icons.green_dot} The '{table_name}' table has been added to the '{dataset}' semantic model within the '{workspace}' workspace."
+            f"{icons.green_dot} The '{table_name}' table has been added to the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
         )
         tom.add_entity_partition(
             table_name=table_name, entity_name=lakehouse_table_name
         )
         print(
-            f"{icons.green_dot} The '{lakehouse_table_name}' partition has been added to the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace."
+            f"{icons.green_dot} The '{lakehouse_table_name}' partition has been added to the '{table_name}' table in the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
         )

         for i, r in dfLC_filt.iterrows():
@@ -193,10 +201,10 @@ def add_table_to_direct_lake_semantic_model(
                 data_type=dt,
             )
             print(
-                f"{icons.green_dot} The '{lakeCName}' column has been added to the '{table_name}' table as a '{dt}' data type in the '{dataset}' semantic model within the '{workspace}' workspace."
+                f"{icons.green_dot} The '{lakeCName}' column has been added to the '{table_name}' table as a '{dt}' data type in the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
             )

     if refresh:
         refresh_semantic_model(
-            dataset=dataset, tables=table_name, workspace=workspace
+            dataset=dataset_id, tables=table_name, workspace=workspace_id
        )
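
Finally, a sketch of the list form of the partition remap; table_name and entity_name are matched positionally, all names are placeholders, and the re-export from sempy_labs.directlake is assumed:

    from sempy_labs.directlake import update_direct_lake_partition_entity

    # Remap two model tables to differently named lakehouse tables in one call.
    update_direct_lake_partition_entity(
        dataset="Sales Model",
        table_name=["DimDate", "FactSales"],
        entity_name=["dim_date", "fact_sales"],
    )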