semantic-link-labs 0.11.2__py3-none-any.whl → 0.11.3__py3-none-any.whl

This diff shows the published contents of two package versions as they appear in their public registries. It is provided for informational purposes only.

Potentially problematic release.

This version of semantic-link-labs might be problematic.

Files changed (29)
  1. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.11.3.dist-info}/METADATA +4 -4
  2. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.11.3.dist-info}/RECORD +26 -24
  3. sempy_labs/__init__.py +12 -18
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_external_data_shares.py +55 -1
  6. sempy_labs/_helper_functions.py +169 -5
  7. sempy_labs/_labels.py +126 -0
  8. sempy_labs/_list_functions.py +1 -1
  9. sempy_labs/_notebooks.py +152 -3
  10. sempy_labs/directlake/_dl_helper.py +4 -1
  11. sempy_labs/graph/_users.py +3 -5
  12. sempy_labs/lakehouse/_helper.py +18 -9
  13. sempy_labs/lakehouse/_lakehouse.py +18 -9
  14. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +38 -47
  15. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +12 -22
  16. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +7 -11
  17. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -23
  18. sempy_labs/ml_model/__init__.py +23 -0
  19. sempy_labs/ml_model/_functions.py +427 -0
  20. sempy_labs/report/_reportwrapper.py +1 -1
  21. sempy_labs/tom/_model.py +8 -3
  22. sempy_labs/variable_library/__init__.py +19 -0
  23. sempy_labs/variable_library/_functions.py +403 -0
  24. sempy_labs/_dax_query_view.py +0 -57
  25. sempy_labs/_ml_models.py +0 -111
  26. sempy_labs/_variable_libraries.py +0 -92
  27. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.11.3.dist-info}/WHEEL +0 -0
  28. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.11.3.dist-info}/licenses/LICENSE +0 -0
  29. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.11.3.dist-info}/top_level.txt +0 -0
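
In summary: the migration modules switch from relative to absolute (sempy_labs.*) imports; workspace and lakehouse parameters across the migration functions are widened from str to str | UUID; the new ml_model and variable_library subpackages replace the deleted _ml_models.py and _variable_libraries.py; and _dax_query_view.py is removed outright.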
sempy_labs/migration/_migrate_calctables_to_lakehouse.py
@@ -2,28 +2,29 @@ import sempy
 import sempy.fabric as fabric
 import pandas as pd
 import re
-from ..lakehouse._get_lakehouse_tables import get_lakehouse_tables
-from .._helper_functions import (
-    resolve_lakehouse_name,
-    resolve_lakehouse_id,
+from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
+from sempy_labs._helper_functions import (
     retry,
     generate_guid,
     save_as_delta_table,
+    resolve_lakehouse_name_and_id,
+    resolve_workspace_name_and_id,
 )
-from ..tom import connect_semantic_model
+from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID


 @log
 def migrate_calc_tables_to_lakehouse(
     dataset: str,
     new_dataset: str,
-    workspace: Optional[str] = None,
-    new_dataset_workspace: Optional[str] = None,
-    lakehouse: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+    new_dataset_workspace: Optional[str | UUID] = None,
+    lakehouse: Optional[str | UUID] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
 ):
     """
     Creates delta tables in your lakehouse based on the DAX expression of a calculated table in an import/DirectQuery semantic model.
@@ -35,18 +36,18 @@ def migrate_calc_tables_to_lakehouse(
         Name of the import/DirectQuery semantic model.
     new_dataset : str
         Name of the Direct Lake semantic model.
-    workspace : str, default=None
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name in which the import/DirectQuery semantic model exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    new_dataset_workspace : str
+    new_dataset_workspace : str | uuid.UUID
         The Fabric workspace name in which the Direct Lake semantic model will be created.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    lakehouse : str, default=None
+    lakehouse : str | uuid.UUID, default=None
         The Fabric lakehouse used by the Direct Lake semantic model.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-    lakehouse_workspace : str, default=None
+    lakehouse_workspace : str | uuid.UUID, default=None
         The Fabric workspace used by the lakehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
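
The hunk above shows the pattern repeated across this release: workspace and lakehouse parameters are widened from str to str | UUID. A minimal sketch of a call the new signature permits, assuming the function is still re-exported from sempy_labs.migration (the names and GUID below are placeholders):

from uuid import UUID
from sempy_labs.migration import migrate_calc_tables_to_lakehouse

migrate_calc_tables_to_lakehouse(
    dataset="Sales",                  # import/DirectQuery model, by name
    new_dataset="Sales Direct Lake",
    # new in 0.11.3: a workspace GUID is accepted as well as a name
    workspace=UUID("00000000-0000-0000-0000-000000000000"),
)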
@@ -57,22 +58,16 @@ def migrate_calc_tables_to_lakehouse(
            f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
        )

-    workspace = fabric.resolve_workspace_name(workspace)
-
-    if new_dataset_workspace is None:
-        new_dataset_workspace = workspace
-
-    if lakehouse_workspace is None:
-        lakehouse_workspace = new_dataset_workspace
-        lakehouse_workspace_id = fabric.resolve_workspace_id(lakehouse_workspace)
-    else:
-        lakehouse_workspace_id = fabric.resolve_workspace_id(lakehouse_workspace)
-
-    if lakehouse is None:
-        lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
-    else:
-        lakehouse_id = resolve_lakehouse_id(lakehouse, lakehouse_workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (new_dataset_workspace_name, new_dataset_workspace_id) = (
+        resolve_workspace_name_and_id(new_dataset_workspace)
+    )
+    (lakehouse_workspace_id, lakehouse_workspace_name) = resolve_workspace_name_and_id(
+        lakehouse_workspace
+    )
+    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
+        lakehouse, lakehouse_workspace
+    )

     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
     dfP_filt = dfP[(dfP["Source Type"] == "Calculated")]
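
The per-parameter resolution branches are collapsed into the resolve_workspace_name_and_id / resolve_lakehouse_name_and_id helpers, each returning a (name, id) pair. Note that the third call site above unpacks its result as (id, name), the reverse of the other two. A rough sketch of the apparent contract, built only from the fabric calls visible in the removed code (an illustration, not the library's actual implementation):

from typing import Optional
from uuid import UUID
import sempy.fabric as fabric

def resolve_workspace_name_and_id(workspace: Optional[str | UUID] = None):
    # None falls back to the current workspace, mirroring the removed code;
    # otherwise a name or UUID is resolved to its counterpart so callers
    # get both forms at once.
    if workspace is None:
        workspace_id = fabric.get_workspace_id()
    else:
        workspace_id = fabric.resolve_workspace_id(workspace)
    workspace_name = fabric.resolve_workspace_name(workspace_id)
    return workspace_name, workspace_id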
@@ -90,7 +85,7 @@ def migrate_calc_tables_to_lakehouse(

             if dtName in lakeTables["Table Name"].values:
                 print(
-                    f"{icons.red_dot} The '{tName}' table already exists as '{dtName}' in the '{lakehouse}' lakehouse in the '{workspace}' workspace."
+                    f"{icons.red_dot} The '{tName}' table already exists as '{dtName}' in the '{lakehouse_name}' lakehouse in the '{lakehouse_workspace_name}' workspace."
                 )
                 killFunction = True

@@ -99,7 +94,7 @@ def migrate_calc_tables_to_lakehouse(

     if len(dfP_filt) == 0:
         print(
-            f"{icons.yellow_dot} The '{dataset}' semantic model in the '{workspace}' workspace has no calculated tables."
+            f"{icons.yellow_dot} The '{dataset}' semantic model in the '{workspace_name}' workspace has no calculated tables."
         )
         return

@@ -175,7 +170,6 @@ def migrate_calc_tables_to_lakehouse(
                         if str(c.Type) == "Calculated"
                         and c.Name == new_column_name
                     )
-
                     if dataType == "Int64":
                         df[new_column_name] = df[
                             new_column_name
@@ -197,7 +191,7 @@ def migrate_calc_tables_to_lakehouse(

                     save_as_delta_table(
                         dataframe=df,
-                        table_name=delta_table_name,
+                        delta_table_name=delta_table_name,
                         lakehouse=lakehouse,
                         workspace=lakehouse_workspace,
                         write_mode="overwrite",
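
The save_as_delta_table keyword changes from table_name to delta_table_name, so any external caller of this helper needs the same one-line rename. A sketch, assuming the helper is still exported at package level (the table and lakehouse names are placeholders):

import pandas as pd
from sempy_labs import save_as_delta_table

df = pd.DataFrame({"id": [1, 2], "value": ["a", "b"]})
save_as_delta_table(
    dataframe=df,
    delta_table_name="my_table",   # was table_name= in 0.11.2
    lakehouse="MyLakehouse",
    write_mode="overwrite",
)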
@@ -231,20 +225,21 @@ def migrate_calc_tables_to_lakehouse(

                 print(
                     f"{icons.green_dot} Calculated table '{t.Name}' has been created as delta table '{delta_table_name.lower()}' "
-                    f"in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
+                    f"in the '{lakehouse_name}' lakehouse within the '{lakehouse_workspace_name}' workspace."
                 )
-            except Exception:
+            except Exception as e:
                 print(
                     f"{icons.red_dot} Failed to create calculated table '{t.Name}' as a delta table in the lakehouse."
                 )
+                print(e)


 @log
 def migrate_field_parameters(
     dataset: str,
     new_dataset: str,
-    workspace: Optional[str] = None,
-    new_dataset_workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+    new_dataset_workspace: Optional[str | UUID] = None,
 ):
     """
     Migrates field parameters from one semantic model to another.

@@ -255,11 +250,11 @@ def migrate_field_parameters(
         Name of the import/DirectQuery semantic model.
     new_dataset : str
         Name of the Direct Lake semantic model.
-    workspace : str, default=None
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name in which the import/DirectQuery semantic model exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    new_dataset_workspace : str
+    new_dataset_workspace : str | uuid.UUID, default=None
         The Fabric workspace name in which the Direct Lake semantic model will be created.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -270,15 +265,11 @@ def migrate_field_parameters(
     sempy.fabric._client._utils._init_analysis_services()
     import Microsoft.AnalysisServices.Tabular as TOM

-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-
-    if new_dataset_workspace is None:
-        new_dataset_workspace = workspace
-
     icons.sll_tags.append("DirectLakeMigration")
     fabric.refresh_tom_cache(workspace=workspace)
+    (new_dataset_workspace_name, new_dataset_workspace_id) = (
+        resolve_workspace_name_and_id(new_dataset_workspace)
+    )

     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfC["Column Object"] = format_dax_object_name(dfC["Table Name"], dfC["Column Name"])

@@ -399,7 +390,7 @@ def migrate_field_parameters(
                 tom.model.Tables[tName].Columns["Value3"].Name = col3

                 print(
-                    f"{icons.green_dot} The '{tName}' table has been added as a field parameter to the '{new_dataset}' semantic model in the '{new_dataset_workspace}' workspace."
+                    f"{icons.green_dot} The '{tName}' table has been added as a field parameter to the '{new_dataset}' semantic model in the '{new_dataset_workspace_name}' workspace."
                 )
             except Exception:
                 print(
sempy_labs/migration/_migrate_calctables_to_semantic_model.py
@@ -1,25 +1,25 @@
 import sempy.fabric as fabric
 import re
-from ..lakehouse._get_lakehouse_tables import get_lakehouse_tables
-from .._helper_functions import (
-    resolve_lakehouse_name,
+from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
+from sempy_labs._helper_functions import (
     format_dax_object_name,
     retry,
 )
-from ..tom import connect_semantic_model
+from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID


 @log
 def migrate_calc_tables_to_semantic_model(
     dataset: str,
     new_dataset: str,
-    workspace: Optional[str] = None,
-    new_dataset_workspace: Optional[str] = None,
-    lakehouse: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+    new_dataset_workspace: Optional[str | UUID] = None,
+    lakehouse: Optional[str | UUID] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
 ):
     """
     Creates new tables in the Direct Lake semantic model based on the lakehouse tables created using the 'migrate_calc_tables_to_lakehouse' function.

@@ -30,18 +30,18 @@ def migrate_calc_tables_to_semantic_model(
         Name of the import/DirectQuery semantic model.
     new_dataset : str
         Name of the Direct Lake semantic model.
-    workspace : str, default=None
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name in which the import/DirectQuery semantic model exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    new_dataset_workspace : str
+    new_dataset_workspace : str | uuid.UUID, default=None
         The Fabric workspace name in which the Direct Lake semantic model will be created.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    lakehouse : str, default=None
+    lakehouse : str | uuid.UUID, default=None
         The Fabric lakehouse used by the Direct Lake semantic model.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-    lakehouse_workspace : str, default=None
+    lakehouse_workspace : str | uuid.UUID, default=None
         The Fabric workspace used by the lakehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -52,18 +52,8 @@ def migrate_calc_tables_to_semantic_model(
            f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
        )

-    workspace = fabric.resolve_workspace_name(workspace)
     fabric.refresh_tom_cache(workspace=workspace)

-    if new_dataset_workspace is None:
-        new_dataset_workspace = workspace
-
-    if lakehouse_workspace is None:
-        lakehouse_workspace = new_dataset_workspace
-    if lakehouse is None:
-        lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
-
     icons.sll_tags.append("DirectLakeMigration")

     # Get calc tables but not field parameters
sempy_labs/migration/_migrate_model_objects_to_semantic_model.py
@@ -1,23 +1,24 @@
 import sempy
 import sempy.fabric as fabric
 import re
-from .._helper_functions import (
+from sempy_labs._helper_functions import (
     create_relationship_name,
     retry,
     format_dax_object_name,
 )
-from ..tom import connect_semantic_model
+from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID


 @log
 def migrate_model_objects_to_semantic_model(
     dataset: str,
     new_dataset: str,
-    workspace: Optional[str] = None,
-    new_dataset_workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+    new_dataset_workspace: Optional[str | UUID] = None,
 ):
     """
     Adds the rest of the model objects (besides tables/columns) and their properties to a Direct Lake semantic model based on an import/DirectQuery semantic model.

@@ -28,11 +29,11 @@ def migrate_model_objects_to_semantic_model(
         Name of the import/DirectQuery semantic model.
     new_dataset : str
         Name of the Direct Lake semantic model.
-    workspace : str, default=None
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name in which the import/DirectQuery semantic model exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    new_dataset_workspace : str
+    new_dataset_workspace : str | uuid.UUID, default=None
         The Fabric workspace name in which the Direct Lake semantic model will be created.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -47,12 +48,7 @@ def migrate_model_objects_to_semantic_model(
            f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
        )

-    workspace = fabric.resolve_workspace_name(workspace)
     fabric.refresh_tom_cache(workspace=workspace)
-
-    if new_dataset_workspace is None:
-        new_dataset_workspace = workspace
-
     icons.sll_tags.append("DirectLakeMigration")

     dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py
@@ -1,22 +1,23 @@
 import sempy.fabric as fabric
 import pandas as pd
-from ..directlake._generate_shared_expression import generate_shared_expression
-from .._helper_functions import resolve_lakehouse_name, retry
-from ..lakehouse._lakehouse import lakehouse_attached
-from ..tom import connect_semantic_model
+from sempy_labs.directlake._generate_shared_expression import generate_shared_expression
+from sempy_labs._helper_functions import retry
+from sempy_labs.lakehouse._lakehouse import lakehouse_attached
+from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID


 @log
 def migrate_tables_columns_to_semantic_model(
     dataset: str,
     new_dataset: str,
-    workspace: Optional[str] = None,
-    new_dataset_workspace: Optional[str] = None,
-    lakehouse: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+    new_dataset_workspace: Optional[str | UUID] = None,
+    lakehouse: Optional[str | UUID] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
 ):
     """
     Adds tables/columns to the new Direct Lake semantic model based on an import/DirectQuery semantic model.
@@ -49,18 +50,6 @@ def migrate_tables_columns_to_semantic_model(
            f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
        )

-    workspace = fabric.resolve_workspace_name(workspace)
-
-    if new_dataset_workspace is None:
-        new_dataset_workspace = workspace
-
-    if lakehouse_workspace is None:
-        lakehouse_workspace = new_dataset_workspace
-
-    if lakehouse is None:
-        lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
-
     icons.sll_tags.append("DirectLakeMigration")

     # Check that lakehouse is attached to the notebook
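
The attached-lakehouse requirement itself is unchanged. A pre-flight check callers can run, using the same helper this module imports (assuming lakehouse_attached is also re-exported from sempy_labs.lakehouse):

from sempy_labs.lakehouse import lakehouse_attached

# Returns True when a default lakehouse is attached to the notebook;
# migrate_tables_columns_to_semantic_model raises otherwise.
if not lakehouse_attached():
    raise ValueError("Attach a lakehouse to this notebook before migrating.")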
@@ -72,11 +61,13 @@ def migrate_tables_columns_to_semantic_model(
            "\nLearn more here: https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse"
        )
     shEx = generate_shared_expression(
-        item_name=lakehouse, item_type="Lakehouse", workspace=lakehouse_workspace
+        item_name=lakehouse,
+        item_type="Lakehouse",
+        workspace=lakehouse_workspace,
+        use_sql_endpoint=False,
     )

     fabric.refresh_tom_cache(workspace=workspace)
-
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
     dfT.rename(columns={"Type": "Table Type"}, inplace=True)
@@ -136,7 +127,7 @@ def migrate_tables_columns_to_semantic_model(
             tDC = r["Data Category"]
             tHid = bool(r["Hidden"])
             tDesc = r["Description"]
-            ent_name = tName.replace(" ", "_")
+            ent_name = tName  # .replace(" ", "_")
             for char in icons.special_characters:
                 ent_name = ent_name.replace(char, "")

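Effect of the change above: spaces in a table name are no longer replaced with underscores when deriving the Direct Lake entity name; only the special-character strip still runs. For example:

tName = "Sales Amount"
# 0.11.2: ent_name = tName.replace(" ", "_")  ->  "Sales_Amount"
# 0.11.3: ent_name = tName                    ->  "Sales Amount"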
sempy_labs/ml_model/__init__.py (new file)
@@ -0,0 +1,23 @@
+from ._functions import (
+    list_ml_models,
+    create_ml_model,
+    delete_ml_model,
+    activate_ml_model_endpoint_version,
+    deactivate_all_ml_model_endpoint_versions,
+    deactivate_ml_model_endpoint_version,
+    list_ml_model_endpoint_versions,
+    score_ml_model_endpoint,
+    score_ml_model_endpoint_version,
+)
+
+__all__ = [
+    "list_ml_models",
+    "create_ml_model",
+    "delete_ml_model",
+    "activate_ml_model_endpoint_version",
+    "deactivate_all_ml_model_endpoint_versions",
+    "deactivate_ml_model_endpoint_version",
+    "list_ml_model_endpoint_versions",
+    "score_ml_model_endpoint",
+    "score_ml_model_endpoint_version",
+]
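
This new sempy_labs.ml_model subpackage, together with its _functions.py (+427 lines, not shown), replaces the deleted sempy_labs/_ml_models.py. A hedged usage sketch; the workspace parameter is an assumption based on the conventions elsewhere in this release, since _functions.py is not part of this excerpt:

import sempy_labs.ml_model as ml_model

# list_* functions in this library conventionally return a pandas DataFrame;
# the exact signature of list_ml_models is not shown in this diff.
df_models = ml_model.list_ml_models(workspace="My Workspace")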