semantic-link-labs 0.9.9__py3-none-any.whl → 0.9.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic; see the registry's advisory page for more details.

Files changed (49)
  1. {semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/METADATA +30 -22
  2. {semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/RECORD +47 -40
  3. {semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +28 -1
  5. sempy_labs/_clear_cache.py +12 -0
  6. sempy_labs/_dax.py +8 -2
  7. sempy_labs/_delta_analyzer.py +17 -26
  8. sempy_labs/_environments.py +19 -1
  9. sempy_labs/_generate_semantic_model.py +7 -8
  10. sempy_labs/_helper_functions.py +351 -151
  11. sempy_labs/_kql_databases.py +18 -0
  12. sempy_labs/_kusto.py +137 -0
  13. sempy_labs/_list_functions.py +18 -36
  14. sempy_labs/_model_bpa_rules.py +13 -3
  15. sempy_labs/_notebooks.py +44 -11
  16. sempy_labs/_semantic_models.py +93 -1
  17. sempy_labs/_sql.py +3 -2
  18. sempy_labs/_tags.py +194 -0
  19. sempy_labs/_variable_libraries.py +89 -0
  20. sempy_labs/_vertipaq.py +6 -6
  21. sempy_labs/_vpax.py +386 -0
  22. sempy_labs/_warehouses.py +3 -3
  23. sempy_labs/admin/__init__.py +14 -0
  24. sempy_labs/admin/_artifacts.py +3 -3
  25. sempy_labs/admin/_capacities.py +161 -1
  26. sempy_labs/admin/_dataflows.py +45 -0
  27. sempy_labs/admin/_items.py +16 -11
  28. sempy_labs/admin/_tags.py +126 -0
  29. sempy_labs/admin/_tenant.py +5 -5
  30. sempy_labs/directlake/_generate_shared_expression.py +29 -26
  31. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +55 -5
  32. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  33. sempy_labs/lakehouse/__init__.py +16 -0
  34. sempy_labs/lakehouse/_blobs.py +115 -63
  35. sempy_labs/lakehouse/_get_lakehouse_columns.py +41 -18
  36. sempy_labs/lakehouse/_get_lakehouse_tables.py +62 -47
  37. sempy_labs/lakehouse/_helper.py +211 -0
  38. sempy_labs/lakehouse/_lakehouse.py +45 -36
  39. sempy_labs/lakehouse/_livy_sessions.py +137 -0
  40. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +7 -12
  41. sempy_labs/migration/_refresh_calc_tables.py +7 -6
  42. sempy_labs/report/_download_report.py +1 -1
  43. sempy_labs/report/_generate_report.py +5 -1
  44. sempy_labs/report/_reportwrapper.py +31 -18
  45. sempy_labs/tom/_model.py +104 -35
  46. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +0 -9
  47. sempy_labs/report/_bpareporttemplate/.platform +0 -11
  48. {semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/licenses/LICENSE +0 -0
  49. {semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/top_level.txt +0 -0
@@ -6,10 +6,9 @@ from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
6
6
  from sempy_labs._helper_functions import (
7
7
  resolve_lakehouse_name,
8
8
  resolve_lakehouse_id,
9
- create_abfss_path,
10
9
  retry,
11
10
  generate_guid,
12
- _create_spark_session,
11
+ save_as_delta_table,
13
12
  )
14
13
  from sempy_labs.tom import connect_semantic_model
15
14
  from typing import Optional
@@ -98,8 +97,6 @@ def migrate_calc_tables_to_lakehouse(
98
97
  if killFunction:
99
98
  return
100
99
 
101
- spark = _create_spark_session()
102
-
103
100
  if len(dfP_filt) == 0:
104
101
  print(
105
102
  f"{icons.yellow_dot} The '{dataset}' semantic model in the '{workspace}' workspace has no calculated tables."
@@ -198,14 +195,12 @@ def migrate_calc_tables_to_lakehouse(
198
195
 
199
196
  delta_table_name = t.Name.replace(" ", "_").lower()
200
197
 
201
- spark_df = spark.createDataFrame(df)
202
- filePath = create_abfss_path(
203
- lakehouse_id=lakehouse_id,
204
- lakehouse_workspace_id=lakehouse_workspace_id,
205
- delta_table_name=delta_table_name,
206
- )
207
- spark_df.write.mode("overwrite").format("delta").save(
208
- filePath
198
+ save_as_delta_table(
199
+ dataframe=df,
200
+ table_name=delta_table_name,
201
+ lakehouse=lakehouse,
202
+ workspace=lakehouse_workspace,
203
+ write_mode="overwrite",
209
204
  )
210
205
 
211
206
  @retry(
@@ -1,7 +1,6 @@
1
1
  import sempy.fabric as fabric
2
2
  import pandas as pd
3
3
  import re
4
- from sempy_labs._helper_functions import retry
5
4
  from sempy_labs.tom import connect_semantic_model
6
5
  from typing import Optional
7
6
  from sempy._utils._log import log
@@ -10,7 +9,8 @@ from uuid import UUID
10
9
  from sempy_labs._helper_functions import (
11
10
  resolve_workspace_name_and_id,
12
11
  resolve_dataset_name_and_id,
13
- _create_spark_session,
12
+ save_as_delta_table,
13
+ retry,
14
14
  )
15
15
 
16
16
 
@@ -29,7 +29,6 @@ def refresh_calc_tables(dataset: str | UUID, workspace: Optional[str | UUID] = N
29
29
  or if no lakehouse attached, resolves to the workspace of the notebook.
30
30
  """
31
31
 
32
- spark = _create_spark_session()
33
32
  (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
34
33
  (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
35
34
  icons.sll_tags.append("DirectLakeMigration")
@@ -117,10 +116,12 @@ def refresh_calc_tables(dataset: str | UUID, workspace: Optional[str | UUID] = N
117
116
  f"{icons.in_progress} Refresh of the '{delta_table_name}' table within the lakehouse is in progress..."
118
117
  )
119
118
 
120
- spark_df = spark.createDataFrame(df)
121
- spark_df.write.mode("overwrite").format("delta").saveAsTable(
122
- delta_table_name
119
+ save_as_delta_table(
120
+ dataframe=df,
121
+ table_name=delta_table_name,
122
+ write_mode="overwrite",
123
123
  )
124
+
124
125
  print(
125
126
  f"{icons.green_dot} Calculated table '{tName}' has been refreshed as the '{delta_table_name.lower()}' table in the lakehouse."
126
127
  )
@@ -22,7 +22,7 @@ def download_report(
22
22
  """
23
23
  Downloads the specified report from the specified workspace to a Power BI .pbix file.
24
24
 
25
- This is a wrapper function for the following API: `Reports - Export Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/export-report-in-group>`.
25
+ This is a wrapper function for the following API: `Reports - Export Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/export-report-in-group>`_.
26
26
 
27
27
  Parameters
28
28
  ----------
@@ -178,6 +178,7 @@ def update_report_from_reportjson(
178
178
  )
179
179
 
180
180
 
181
+ @log
181
182
  def get_report_definition(
182
183
  report: str | UUID,
183
184
  workspace: Optional[str | UUID] = None,
@@ -206,7 +207,10 @@ def get_report_definition(
206
207
  """
207
208
 
208
209
  return get_item_definition(
209
- item=report, type="Report", workspace=workspace, return_dataframe=True
210
+ item=report,
211
+ type="Report",
212
+ workspace=workspace,
213
+ return_dataframe=return_dataframe,
210
214
  )
211
215
 
212
216
 
@@ -105,27 +105,40 @@ class ReportWrapper:
105
105
  )
106
106
  )
107
107
 
108
+ report_level_measures = list(
109
+ self.list_report_level_measures()["Measure Name"].values
110
+ )
108
111
  with connect_semantic_model(
109
112
  dataset=dataset_id, readonly=True, workspace=dataset_workspace_id
110
113
  ) as tom:
111
- for index, row in dataframe.iterrows():
112
- obj_type = row["Object Type"]
113
- if obj_type == "Measure":
114
- dataframe.at[index, "Valid Semantic Model Object"] = any(
115
- o.Name == row["Object Name"] for o in tom.all_measures()
116
- )
117
- elif obj_type == "Column":
118
- dataframe.at[index, "Valid Semantic Model Object"] = any(
119
- format_dax_object_name(c.Parent.Name, c.Name)
120
- == format_dax_object_name(row["Table Name"], row["Object Name"])
121
- for c in tom.all_columns()
122
- )
123
- elif obj_type == "Hierarchy":
124
- dataframe.at[index, "Valid Semantic Model Object"] = any(
125
- format_dax_object_name(h.Parent.Name, h.Name)
126
- == format_dax_object_name(row["Table Name"], row["Object Name"])
127
- for h in tom.all_hierarchies()
128
- )
114
+ measure_names = {m.Name for m in tom.all_measures()}
115
+ measure_names.update(report_level_measures)
116
+ column_names = {
117
+ format_dax_object_name(c.Parent.Name, c.Name) for c in tom.all_columns()
118
+ }
119
+ hierarchy_names = {
120
+ format_dax_object_name(h.Parent.Name, h.Name)
121
+ for h in tom.all_hierarchies()
122
+ }
123
+
124
+ # Vectorized checks
125
+ def is_valid(row):
126
+ obj_type = row["Object Type"]
127
+ obj_name = row["Object Name"]
128
+ if obj_type == "Measure":
129
+ return obj_name in measure_names
130
+ elif obj_type == "Column":
131
+ return (
132
+ format_dax_object_name(row["Table Name"], obj_name) in column_names
133
+ )
134
+ elif obj_type == "Hierarchy":
135
+ return (
136
+ format_dax_object_name(row["Table Name"], obj_name)
137
+ in hierarchy_names
138
+ )
139
+ return False
140
+
141
+ dataframe["Valid Semantic Model Object"] = dataframe.apply(is_valid, axis=1)
129
142
  return dataframe
130
143
 
131
144
  def _update_single_file(self, file_name: str, new_payload):
sempy_labs/tom/_model.py CHANGED
@@ -802,23 +802,27 @@ class TOMWrapper:
802
802
  if permission not in ["Read", "None", "Default"]:
803
803
  raise ValueError(f"{icons.red_dot} Invalid 'permission' value.")
804
804
 
805
- cp = TOM.ColumnPermission()
806
- cp.Column = self.model.Tables[table_name].Columns[column_name]
807
- cp.MetadataPermission = System.Enum.Parse(TOM.MetadataPermission, permission)
808
-
809
- if any(
810
- c.Name == column_name and t.Name == table_name and r.Name == role_name
811
- for r in self.model.Roles
812
- for t in r.TablePermissions
813
- for c in t.ColumnPermissions
814
- ):
815
- self.model.Roles[role_name].TablePermissions[table_name].ColumnPermissions[
805
+ r = self.model.Roles[role_name]
806
+ tables = [t.Name for t in r.TablePermissions]
807
+ # Add table permission if it does not exist
808
+ if table_name not in tables:
809
+ tp = TOM.TablePermission()
810
+ tp.Table = self.model.Tables[table_name]
811
+ r.TablePermissions.Add(tp)
812
+ columns = [c.Name for c in r.TablePermissions[table_name].ColumnPermissions]
813
+ # Add column permission if it does not exist
814
+ if column_name not in columns:
815
+ cp = TOM.ColumnPermission()
816
+ cp.Column = self.model.Tables[table_name].Columns[column_name]
817
+ cp.MetadataPermission = System.Enum.Parse(
818
+ TOM.MetadataPermission, permission
819
+ )
820
+ r.TablePermissions[table_name].ColumnPermissions.Add(cp)
821
+ # Set column permission if it already exists
822
+ else:
823
+ r.TablePermissions[table_name].ColumnPermissions[
816
824
  column_name
817
825
  ].MetadataPermission = System.Enum.Parse(TOM.MetadataPermission, permission)
818
- else:
819
- self.model.Roles[role_name].TablePermissions[
820
- table_name
821
- ].ColumnPermissions.Add(cp)
822
826
 
823
827
  def add_hierarchy(
824
828
  self,
@@ -938,19 +942,23 @@ class TOMWrapper:
938
942
  import Microsoft.AnalysisServices.Tabular as TOM
939
943
  import System
940
944
 
941
- if cross_filtering_behavior is None:
945
+ if not cross_filtering_behavior:
942
946
  cross_filtering_behavior = "Automatic"
943
- if security_filtering_behavior is None:
947
+ if not security_filtering_behavior:
944
948
  security_filtering_behavior = "OneDirection"
945
949
 
946
- from_cardinality = from_cardinality.capitalize()
947
- to_cardinality = to_cardinality.capitalize()
948
- cross_filtering_behavior = cross_filtering_behavior.capitalize()
949
- security_filtering_behavior = security_filtering_behavior.capitalize()
950
+ for var_name in [
951
+ "from_cardinality",
952
+ "to_cardinality",
953
+ "cross_filtering_behavior",
954
+ "security_filtering_behavior",
955
+ ]:
956
+ locals()[var_name] = locals()[var_name].capitalize()
957
+
958
+ cross_filtering_behavior = cross_filtering_behavior.replace("direct", "Direct")
950
959
  security_filtering_behavior = security_filtering_behavior.replace(
951
960
  "direct", "Direct"
952
961
  )
953
- cross_filtering_behavior = cross_filtering_behavior.replace("direct", "Direct")
954
962
 
955
963
  rel = TOM.SingleColumnRelationship()
956
964
  rel.FromColumn = self.model.Tables[from_table].Columns[from_column]
@@ -962,13 +970,16 @@ class TOMWrapper:
962
970
  TOM.RelationshipEndCardinality, to_cardinality
963
971
  )
964
972
  rel.IsActive = is_active
965
- rel.CrossFilteringBehavior = System.Enum.Parse(
966
- TOM.CrossFilteringBehavior, cross_filtering_behavior
967
- )
968
- rel.SecurityFilteringBehavior = System.Enum.Parse(
969
- TOM.SecurityFilteringBehavior, security_filtering_behavior
970
- )
971
- rel.RelyOnReferentialIntegrity = rely_on_referential_integrity
973
+ if cross_filtering_behavior != "Automatic":
974
+ rel.CrossFilteringBehavior = System.Enum.Parse(
975
+ TOM.CrossFilteringBehavior, cross_filtering_behavior
976
+ )
977
+ if security_filtering_behavior != "OneDirection":
978
+ rel.SecurityFilteringBehavior = System.Enum.Parse(
979
+ TOM.SecurityFilteringBehavior, security_filtering_behavior
980
+ )
981
+ if rely_on_referential_integrity:
982
+ rel.RelyOnReferentialIntegrity = True
972
983
 
973
984
  self.model.Relationships.Add(rel)
974
985
 
@@ -3655,25 +3666,27 @@ class TOMWrapper:
3655
3666
  import Microsoft.AnalysisServices.Tabular as TOM
3656
3667
  import System
3657
3668
 
3658
- if not self.has_incremental_refresh_policy(table_name=table_name):
3669
+ if not self.has_incremental_refresh_policy(
3670
+ object=self.model.Tables[table_name]
3671
+ ):
3659
3672
  print(
3660
3673
  f"The '{table_name}' table does not have an incremental refresh policy."
3661
3674
  )
3662
3675
  return
3663
3676
 
3664
- incGran = ["Day", "Month", "Quarter", "Year"]
3677
+ granularities = ["Day", "Month", "Quarter", "Year"]
3665
3678
 
3666
3679
  incremental_granularity = incremental_granularity.capitalize()
3667
3680
  rolling_window_granularity = rolling_window_granularity.capitalize()
3668
3681
 
3669
- if incremental_granularity not in incGran:
3682
+ if incremental_granularity not in granularities:
3670
3683
  raise ValueError(
3671
- f"{icons.red_dot} Invalid 'incremental_granularity' value. Please choose from the following options: {incGran}."
3684
+ f"{icons.red_dot} Invalid 'incremental_granularity' value. Please choose from the following options: {granularities}."
3672
3685
  )
3673
3686
 
3674
- if rolling_window_granularity not in incGran:
3687
+ if rolling_window_granularity not in granularities:
3675
3688
  raise ValueError(
3676
- f"{icons.red_dot} Invalid 'rolling_window_granularity' value. Please choose from the following options: {incGran}."
3689
+ f"{icons.red_dot} Invalid 'rolling_window_granularity' value. Please choose from the following options: {granularities}."
3677
3690
  )
3678
3691
 
3679
3692
  if rolling_window_periods < 1:
@@ -5069,6 +5082,62 @@ class TOMWrapper:
5069
5082
  f"{icons.green_dot} The '{table_name}' table has been converted to Import mode."
5070
5083
  )
5071
5084
 
5085
+ def copy_object(
5086
+ self,
5087
+ object,
5088
+ target_dataset: str | UUID,
5089
+ target_workspace: Optional[str | UUID] = None,
5090
+ readonly: bool = False,
5091
+ ):
5092
+ """
5093
+ Copies a semantic model object from the current semantic model to the target semantic model.
5094
+
5095
+ Parameters
5096
+ ----------
5097
+ object : TOM Object
5098
+ The TOM object to be copied to the target semantic model. For example: tom.model.Tables['Sales'].
5099
+ target_dataset : str | uuid.UUID
5100
+ Name or ID of the target semantic model.
5101
+ target_workspace : str | uuid.UUID, default=None
5102
+ The Fabric workspace name or ID.
5103
+ Defaults to None which resolves to the workspace of the attached lakehouse
5104
+ or if no lakehouse attached, resolves to the workspace of the notebook.
5105
+ readonly : bool, default=False
5106
+ Whether the connection is read-only or read/write. Setting this to False enables read/write which saves the changes made back to the server.
5107
+ """
5108
+
5109
+ import Microsoft.AnalysisServices.Tabular as TOM
5110
+
5111
+ clone = object.Clone()
5112
+ with connect_semantic_model(
5113
+ dataset=target_dataset,
5114
+ workspace=target_workspace,
5115
+ readonly=readonly,
5116
+ ) as target_tom:
5117
+ if isinstance(object, TOM.Table):
5118
+ target_tom.model.Tables.Add(clone)
5119
+ elif isinstance(object, TOM.Column):
5120
+ target_tom.model.Tables[object.Parent.Name].Columns.Add(clone)
5121
+ elif isinstance(object, TOM.Measure):
5122
+ target_tom.model.Tables[object.Parent.Name].Measures.Add(clone)
5123
+ elif isinstance(object, TOM.Hierarchy):
5124
+ target_tom.model.Tables[object.Parent.Name].Hierarchies.Add(clone)
5125
+ elif isinstance(object, TOM.Level):
5126
+ target_tom.model.Tables[object.Parent.Parent.Name].Hierarchies[
5127
+ object.Parent.Name
5128
+ ].Levels.Add(clone)
5129
+ elif isinstance(object, TOM.Role):
5130
+ target_tom.model.Roles.Add(clone)
5131
+ elif isinstance(object, TOM.Relationship):
5132
+ target_tom.model.Relationships.Add(clone)
5133
+ else:
5134
+ raise NotImplementedError(
5135
+ f"{icons.red_dot} The '{object.ObjectType}' object type is not supported."
5136
+ )
5137
+ print(
5138
+ f"{icons.green_dot} The '{object.Name}' {str(object.ObjectType).lower()} has been copied to the '{target_dataset}' semantic model within the '{target_workspace}' workspace."
5139
+ )
5140
+
5072
5141
  def close(self):
5073
5142
 
5074
5143
  if not self._readonly and self.model is not None:
@@ -1,9 +0,0 @@
1
- {
2
- "version": "1.0",
3
- "remoteArtifacts": [
4
- {
5
- "reportId": "6a91c344-dba8-4ebf-bedb-e07134f2a204"
6
- }
7
- ],
8
- "securityBindingsSignature": "AQAAANCMnd8BFdERjHoAwE/Cl+sBAAAAMAVu7l76YU6Sl11KOiJdgQAAAAACAAAAAAAQZgAAAAEAACAAAAD1Ty+c7tZLT9/Sjguxhn/5ivfLWfUMNtgudyJ3BKzzZgAAAAAOgAAAAAIAACAAAABAyGf+iKMwvmNtcoRczjgebeIm0nlc9SFYTBYv3N7yvVADAADQN3JsIsvJUcKKc9WMo2EhiE21odezpd35tb+yudHwA/RYhypMA3fwiCTwArLefBZQ3vZ7KYh4MjihXS07i9o1XVqxAmDoli83Yhs/Wei+0HIfYOT5HOVHLUEul5x41Yx/7Bdfhc881SK6IoaJogBdwsiJVxPne+niMYqJQA6qLEPyJ33g6ucUxLA40lwdbN2cMWFzRn6tymmicDPwH0hcGPDMWwseAU+OuUeidkneRWhUGs6lkiiXLiO6kmY5RKq+S4FdtR19/e1B6EjAd94zSw+M5jQzYxn4eCZzWYiB+8Zd/jy07lfyLoGwagNqiQzbcNONqQd5w0n+8/+n4zGkBi2UojfRXoGaYDirQeZMTbt3pfPx2PArxsJ8dF0iT634pHiCF1ZFdtY+79JaFLUUG+Yf7JJv8IxuuuF74tAp4NYmuOij4hTDaf8Jafa5IoRVh7ICkwrjJyVQ8dG7I3tr0VvR+toBPG3Zlbm9BijcaBxhh1AINhnRAIkENOnPFQVH7l3Ml7B60H8Tst6ic3ihCCMYjtmN+NNWqFrJKT2trilh5TAxN+ei4H5fPwM9S7zb2bH5jhExcYTtoe7iCzxOvBsoYoFM+7FMjn9R2FATNICktYdbKDo1Of+u4oZ1+RsvBHQBVaMhSCoZ7+K5T5pZayNK3V2UID3wOuLOYvouxXXr4NVFsdgiV2oMuxTWeqmd/4bLxeqe3uTkGFmQU4mumF2YVsNbdO3IcRXhhrCCZ27ffzXBsH+lE3EhusD37Z0dsVbVVlG8AHXCh7Atgd8n73/eSI5mvj36DCOSRBVauItIATIa2FXueKA7vU6lRDYBSX8FCC2qkeN6dWpMoN5uXXEBsb5Yot1Fgrovcyl5lk7rh772Xon4FaIYFHZpklsY3JK5EXp3bF8UOE6ByN1ZucmkGgYRcTT/up/Uc86TLN6env9XXL4FQYPlReiOGWKBLVi9OoXGRLDshspniULtV3EwQ6WsjF2AyQ+WdLj3bbWKzG5Mg9jvANLrjycZAGWskh4X5JDGiv4TiJmnYQ/xPZAKKiowpVIHikLeG76uXFI+bxtpihV9+DaEJy4UxisHQxwuvUsQs38u3SHgpJmT8CNssZl41+T/IJdoQwJFLUAAAACnUQZGV9DvcOyrj8HBpXBVB5PuOQDxLB4HZOevHqCB5dc5z787E93B51QmN7I15fF6GCdWwN5f94gv1er2dtN3"
9
- }
@@ -1,11 +0,0 @@
1
- {
2
- "$schema": "https://developer.microsoft.com/json-schemas/fabric/gitIntegration/platformProperties/2.0.0/schema.json",
3
- "metadata": {
4
- "type": "Report",
5
- "displayName": "BPAReport"
6
- },
7
- "config": {
8
- "version": "2.0",
9
- "logicalId": "a201f2cd-fd25-465f-bfbc-33b151e38b31"
10
- }
11
- }