semantic-link-labs 0.10.0__py3-none-any.whl → 0.11.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (95):
  1. {semantic_link_labs-0.10.0.dist-info → semantic_link_labs-0.11.0.dist-info}/METADATA +9 -6
  2. {semantic_link_labs-0.10.0.dist-info → semantic_link_labs-0.11.0.dist-info}/RECORD +95 -87
  3. sempy_labs/__init__.py +11 -1
  4. sempy_labs/_a_lib_info.py +2 -0
  5. sempy_labs/_capacities.py +2 -0
  6. sempy_labs/_connections.py +11 -0
  7. sempy_labs/_dashboards.py +9 -4
  8. sempy_labs/_data_pipelines.py +5 -0
  9. sempy_labs/_dataflows.py +284 -17
  10. sempy_labs/_daxformatter.py +80 -0
  11. sempy_labs/_delta_analyzer_history.py +4 -1
  12. sempy_labs/_deployment_pipelines.py +4 -0
  13. sempy_labs/_documentation.py +3 -0
  14. sempy_labs/_environments.py +10 -1
  15. sempy_labs/_eventhouses.py +12 -5
  16. sempy_labs/_eventstreams.py +11 -3
  17. sempy_labs/_external_data_shares.py +8 -2
  18. sempy_labs/_gateways.py +26 -5
  19. sempy_labs/_git.py +11 -0
  20. sempy_labs/_graphQL.py +10 -3
  21. sempy_labs/_helper_functions.py +62 -10
  22. sempy_labs/_job_scheduler.py +54 -7
  23. sempy_labs/_kql_databases.py +11 -2
  24. sempy_labs/_kql_querysets.py +11 -3
  25. sempy_labs/_list_functions.py +17 -45
  26. sempy_labs/_managed_private_endpoints.py +11 -2
  27. sempy_labs/_mirrored_databases.py +17 -3
  28. sempy_labs/_mirrored_warehouses.py +9 -3
  29. sempy_labs/_ml_experiments.py +11 -3
  30. sempy_labs/_ml_models.py +11 -3
  31. sempy_labs/_model_bpa_rules.py +2 -0
  32. sempy_labs/_mounted_data_factories.py +12 -8
  33. sempy_labs/_notebooks.py +6 -3
  34. sempy_labs/_refresh_semantic_model.py +1 -0
  35. sempy_labs/_semantic_models.py +107 -0
  36. sempy_labs/_spark.py +7 -0
  37. sempy_labs/_sql_endpoints.py +208 -0
  38. sempy_labs/_sqldatabase.py +13 -4
  39. sempy_labs/_tags.py +5 -1
  40. sempy_labs/_user_delegation_key.py +2 -0
  41. sempy_labs/_variable_libraries.py +3 -1
  42. sempy_labs/_warehouses.py +13 -3
  43. sempy_labs/_workloads.py +3 -0
  44. sempy_labs/_workspace_identity.py +3 -0
  45. sempy_labs/_workspaces.py +14 -1
  46. sempy_labs/admin/__init__.py +2 -0
  47. sempy_labs/admin/_activities.py +6 -5
  48. sempy_labs/admin/_apps.py +31 -31
  49. sempy_labs/admin/_artifacts.py +8 -3
  50. sempy_labs/admin/_basic_functions.py +5 -0
  51. sempy_labs/admin/_capacities.py +39 -28
  52. sempy_labs/admin/_datasets.py +51 -51
  53. sempy_labs/admin/_domains.py +17 -1
  54. sempy_labs/admin/_external_data_share.py +8 -2
  55. sempy_labs/admin/_git.py +14 -9
  56. sempy_labs/admin/_items.py +15 -2
  57. sempy_labs/admin/_reports.py +64 -65
  58. sempy_labs/admin/_shared.py +7 -1
  59. sempy_labs/admin/_tags.py +5 -0
  60. sempy_labs/admin/_tenant.py +5 -2
  61. sempy_labs/admin/_users.py +9 -3
  62. sempy_labs/admin/_workspaces.py +88 -0
  63. sempy_labs/directlake/_dl_helper.py +2 -0
  64. sempy_labs/directlake/_generate_shared_expression.py +2 -0
  65. sempy_labs/directlake/_get_directlake_lakehouse.py +2 -4
  66. sempy_labs/directlake/_get_shared_expression.py +2 -0
  67. sempy_labs/directlake/_guardrails.py +2 -0
  68. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +5 -3
  69. sempy_labs/directlake/_warm_cache.py +1 -0
  70. sempy_labs/graph/_groups.py +22 -7
  71. sempy_labs/graph/_teams.py +7 -2
  72. sempy_labs/graph/_users.py +1 -0
  73. sempy_labs/lakehouse/_blobs.py +1 -0
  74. sempy_labs/lakehouse/_get_lakehouse_tables.py +88 -27
  75. sempy_labs/lakehouse/_helper.py +2 -0
  76. sempy_labs/lakehouse/_lakehouse.py +38 -5
  77. sempy_labs/lakehouse/_livy_sessions.py +2 -1
  78. sempy_labs/lakehouse/_shortcuts.py +7 -1
  79. sempy_labs/migration/_direct_lake_to_import.py +2 -0
  80. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
  81. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
  82. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
  83. sempy_labs/report/_download_report.py +2 -1
  84. sempy_labs/report/_generate_report.py +2 -0
  85. sempy_labs/report/_paginated.py +2 -0
  86. sempy_labs/report/_report_bpa.py +110 -122
  87. sempy_labs/report/_report_bpa_rules.py +2 -0
  88. sempy_labs/report/_report_functions.py +7 -0
  89. sempy_labs/report/_reportwrapper.py +86 -48
  90. sempy_labs/theme/__init__.py +12 -0
  91. sempy_labs/theme/_org_themes.py +96 -0
  92. sempy_labs/tom/_model.py +702 -35
  93. {semantic_link_labs-0.10.0.dist-info → semantic_link_labs-0.11.0.dist-info}/WHEEL +0 -0
  94. {semantic_link_labs-0.10.0.dist-info → semantic_link_labs-0.11.0.dist-info}/licenses/LICENSE +0 -0
  95. {semantic_link_labs-0.10.0.dist-info → semantic_link_labs-0.11.0.dist-info}/top_level.txt +0 -0
sempy_labs/tom/_model.py CHANGED
@@ -2,8 +2,10 @@ import sempy
2
2
  import sempy.fabric as fabric
3
3
  import pandas as pd
4
4
  import re
5
+ import os
5
6
  import json
6
7
  from datetime import datetime
8
+ from decimal import Decimal
7
9
  from sempy_labs._helper_functions import (
8
10
  format_dax_object_name,
9
11
  generate_guid,
@@ -14,17 +16,19 @@ from sempy_labs._helper_functions import (
14
16
  resolve_workspace_id,
15
17
  resolve_item_id,
16
18
  resolve_lakehouse_id,
19
+ _validate_weight,
17
20
  )
18
21
  from sempy_labs._list_functions import list_relationships
19
22
  from sempy_labs._refresh_semantic_model import refresh_semantic_model
20
23
  from sempy_labs.directlake._dl_helper import check_fallback_reason
21
24
  from contextlib import contextmanager
22
- from typing import List, Iterator, Optional, Union, TYPE_CHECKING
25
+ from typing import List, Iterator, Optional, Union, TYPE_CHECKING, Literal
23
26
  from sempy._utils._log import log
24
27
  import sempy_labs._icons as icons
25
28
  import ast
26
29
  from uuid import UUID
27
30
  import sempy_labs._authentication as auth
31
+ from sempy_labs.lakehouse._lakehouse import lakehouse_attached
28
32
 
29
33
 
30
34
  if TYPE_CHECKING:
@@ -47,6 +51,13 @@ class TOMWrapper:
47
51
  _tables_added: List[str]
48
52
  _table_map = dict
49
53
  _column_map = dict
54
+ _dax_formatting = {
55
+ "measures": [],
56
+ "calculated_columns": [],
57
+ "calculated_tables": [],
58
+ "calculation_items": [],
59
+ "rls": [],
60
+ }
50
61
 
51
62
  def __init__(self, dataset, workspace, readonly):
52
63
 
@@ -777,7 +788,11 @@ class TOMWrapper:
777
788
  self.model.Roles[role_name].TablePermissions.Add(tp)
778
789
 
779
790
  def set_ols(
780
- self, role_name: str, table_name: str, column_name: str, permission: str
791
+ self,
792
+ role_name: str,
793
+ table_name: str,
794
+ column_name: Optional[str] = None,
795
+ permission: Literal["Default", "None", "Read"] = "Default",
781
796
  ):
782
797
  """
783
798
  Sets the object level security permissions for a column within a role.
@@ -788,9 +803,9 @@ class TOMWrapper:
788
803
  Name of the role.
789
804
  table_name : str
790
805
  Name of the table.
791
- column_name : str
792
- Name of the column.
793
- permission : str
806
+ column_name : str, default=None
807
+ Name of the column. Defaults to None which sets object level security for the entire table.
808
+ permission : Literal["Default", "None", "Read"], default="Default"
794
809
  The object level security permission for the column.
795
810
  `Permission valid values <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.metadatapermission?view=analysisservices-dotnet>`_
796
811
  """
@@ -810,19 +825,29 @@ class TOMWrapper:
810
825
  tp.Table = self.model.Tables[table_name]
811
826
  r.TablePermissions.Add(tp)
812
827
  columns = [c.Name for c in r.TablePermissions[table_name].ColumnPermissions]
813
- # Add column permission if it does not exist
814
- if column_name not in columns:
815
- cp = TOM.ColumnPermission()
816
- cp.Column = self.model.Tables[table_name].Columns[column_name]
817
- cp.MetadataPermission = System.Enum.Parse(
828
+
829
+ # Set column level security if column is specified
830
+ if column_name:
831
+ # Add column permission if it does not exist
832
+ if column_name not in columns:
833
+ cp = TOM.ColumnPermission()
834
+ cp.Column = self.model.Tables[table_name].Columns[column_name]
835
+ cp.MetadataPermission = System.Enum.Parse(
836
+ TOM.MetadataPermission, permission
837
+ )
838
+ r.TablePermissions[table_name].ColumnPermissions.Add(cp)
839
+ # Set column permission if it already exists
840
+ else:
841
+ r.TablePermissions[table_name].ColumnPermissions[
842
+ column_name
843
+ ].MetadataPermission = System.Enum.Parse(
844
+ TOM.MetadataPermission, permission
845
+ )
846
+ # Set table level security if column is not specified
847
+ else:
848
+ r.TablePermissions[table_name].MetadataPermission = System.Enum.Parse(
818
849
  TOM.MetadataPermission, permission
819
850
  )
820
- r.TablePermissions[table_name].ColumnPermissions.Add(cp)
821
- # Set column permission if it already exists
822
- else:
823
- r.TablePermissions[table_name].ColumnPermissions[
824
- column_name
825
- ].MetadataPermission = System.Enum.Parse(TOM.MetadataPermission, permission)
826
851
 
827
852
  def add_hierarchy(
828
853
  self,
@@ -904,11 +929,15 @@ class TOMWrapper:
904
929
  from_column: str,
905
930
  to_table: str,
906
931
  to_column: str,
907
- from_cardinality: str,
908
- to_cardinality: str,
909
- cross_filtering_behavior: Optional[str] = None,
932
+ from_cardinality: Literal["Many", "One", "None"],
933
+ to_cardinality: Literal["Many", "One", "None"],
934
+ cross_filtering_behavior: Literal[
935
+ "Automatic", "OneDirection", "BothDirections"
936
+ ] = "Automatic",
910
937
  is_active: bool = True,
911
- security_filtering_behavior: Optional[str] = None,
938
+ security_filtering_behavior: Optional[
939
+ Literal["None", "OneDirection", "BothDirections"]
940
+ ] = None,
912
941
  rely_on_referential_integrity: bool = False,
913
942
  ):
914
943
  """
@@ -924,29 +953,22 @@ class TOMWrapper:
924
953
  Name of the table on the 'to' side of the relationship.
925
954
  to_column : str
926
955
  Name of the column on the 'to' side of the relationship.
927
- from_cardinality : str
928
- The cardinality of the 'from' side of the relationship. Options: ['Many', 'One', 'None'].
929
- to_cardinality : str
930
- The cardinality of the 'to' side of the relationship. Options: ['Many', 'One', 'None'].
931
- cross_filtering_behavior : str, default=None
956
+ from_cardinality : Literal["Many", "One", "None"]
957
+ The cardinality of the 'from' side of the relationship.
958
+ to_cardinality : Literal["Many", "One", "None"]
959
+ The cardinality of the 'to' side of the relationship.
960
+ cross_filtering_behavior : Literal["Automatic", "OneDirection", "BothDirections"], default="Automatic"
932
961
  Setting for the cross filtering behavior of the relationship. Options: ('Automatic', 'OneDirection', 'BothDirections').
933
- Defaults to None which resolves to 'Automatic'.
934
962
  is_active : bool, default=True
935
963
  Setting for whether the relationship is active or not.
936
- security_filtering_behavior : str, default=None
937
- Setting for the security filtering behavior of the relationship. Options: ('None', 'OneDirection', 'BothDirections').
938
- Defaults to None which resolves to 'OneDirection'.
964
+ security_filtering_behavior : Literal["None, "OneDirection", "BothDirections"], default="OneDirection"
965
+ Setting for the security filtering behavior of the relationship.
939
966
  rely_on_referential_integrity : bool, default=False
940
967
  Setting for the rely on referential integrity of the relationship.
941
968
  """
942
969
  import Microsoft.AnalysisServices.Tabular as TOM
943
970
  import System
944
971
 
945
- if not cross_filtering_behavior:
946
- cross_filtering_behavior = "Automatic"
947
- if not security_filtering_behavior:
948
- security_filtering_behavior = "OneDirection"
949
-
950
972
  for var_name in [
951
973
  "from_cardinality",
952
974
  "to_cardinality",
@@ -4716,7 +4738,12 @@ class TOMWrapper:
4716
4738
  TOM.ValueFilterBehaviorType, value_filter_behavior
4717
4739
  )
4718
4740
 
4719
- def add_role_member(self, role_name: str, member: str | List[str]):
4741
+ def add_role_member(
4742
+ self,
4743
+ role_name: str,
4744
+ member: str | List[str],
4745
+ role_member_type: Optional[str] = "User",
4746
+ ):
4720
4747
  """
4721
4748
  Adds an external model role member (AzureAD) to a role.
4722
4749
 
@@ -4726,13 +4753,23 @@ class TOMWrapper:
4726
4753
  The role name.
4727
4754
  member : str | List[str]
4728
4755
  The email address(es) of the member(s) to add.
4756
+ role_member_type : str, default="User"
4757
+ The type of the role member. Default is "User". Other options include "Group" for Azure AD groups.
4758
+ All members must be of the same role_member_type.
4729
4759
  """
4730
4760
 
4731
4761
  import Microsoft.AnalysisServices.Tabular as TOM
4762
+ import System
4732
4763
 
4733
4764
  if isinstance(member, str):
4734
4765
  member = [member]
4735
4766
 
4767
+ role_member_type = role_member_type.capitalize()
4768
+ if role_member_type not in ["User", "Group"]:
4769
+ raise ValueError(
4770
+ f"{icons.red_dot} The '{role_member_type}' is not a valid role member type. Valid options: 'User', 'Group'."
4771
+ )
4772
+
4736
4773
  role = self.model.Roles[role_name]
4737
4774
  current_members = [m.MemberName for m in role.Members]
4738
4775
 
@@ -4741,6 +4778,7 @@ class TOMWrapper:
4741
4778
  rm = TOM.ExternalModelRoleMember()
4742
4779
  rm.IdentityProvider = "AzureAD"
4743
4780
  rm.MemberName = m
4781
+ rm.MemberType = System.Enum.Parse(TOM.RoleMemberType, role_member_type)
4744
4782
  role.Members.Add(rm)
4745
4783
  print(
4746
4784
  f"{icons.green_dot} '{m}' has been added as a member of the '{role_name}' role."
@@ -4799,6 +4837,47 @@ class TOMWrapper:
4799
4837
 
4800
4838
  return bim
4801
4839
 
4840
def clear_linguistic_schema(self, culture: str):
    """
    Clears the linguistic schema for a given culture, leaving only the
    minimal ``Version``/``Language`` envelope.

    Parameters
    ----------
    culture : str
        The culture name.
    """

    # Build the empty schema as a dict and serialize it exactly once.
    # (Serializing a pre-built JSON string would double-encode it and store
    # a quoted string literal instead of a JSON object.)
    empty_schema = {"Version": "1.0.0", "Language": culture}

    self.model.Cultures[culture].LinguisticMetadata.Content = json.dumps(
        empty_schema, indent=4
    )
4856
def get_linguistic_schema(self, culture: str) -> dict:
    """
    Obtains the linguistic schema for a given culture.

    NOTE(review): despite its name, this implementation scripts the whole
    database and returns the full model .bim dictionary; the ``culture``
    parameter is never used. A later method of the same name in this class
    shadows this definition — confirm which implementation is intended.

    Parameters
    ----------
    culture : str
        The culture name.

    Returns
    -------
    dict
        The .bim file.
    """

    import Microsoft.AnalysisServices.Tabular as TOM

    scripted = TOM.JsonScripter.ScriptCreate(self.model.Database)
    payload = json.loads(scripted)
    return payload.get("create").get("database")
4802
4881
  def _reduce_model(self, perspective_name: str):
4803
4882
  """
4804
4883
  Reduces a model's objects based on a perspective. Adds the dependent objects within a perspective to that perspective.
@@ -5138,8 +5217,596 @@ class TOMWrapper:
5138
5217
  f"{icons.green_dot} The '{object.Name}' {str(object.ObjectType).lower()} has been copied to the '{target_dataset}' semantic model within the '{target_workspace}' workspace."
5139
5218
  )
5140
5219
 
5220
def format_dax(
    self,
    object: Optional[
        Union[
            "TOM.Measure",
            "TOM.CalculatedColumn",
            "TOM.CalculationItem",
            "TOM.CalculatedTable",
            "TOM.TablePermission",
        ]
    ] = None,
):
    """
    Formats the DAX expressions of measures, calculated columns, calculation items, calculated tables and row level security expressions in the semantic model.

    This function uses the `DAX Formatter API <https://www.daxformatter.com/>`_.

    The objects are queued in ``self._dax_formatting`` and the formatted
    expressions are applied to the model when the connection is closed.

    Parameters
    ----------
    object : TOM Object, default=None
        The TOM object to format. If None, formats all measures, calculated columns, calculation items, calculated tables and row level security expressions in the semantic model.
        If a specific object is provided, only that object will be formatted.
    """

    import Microsoft.AnalysisServices.Tabular as TOM

    if object is None:
        object_map = {
            "measures": self.all_measures,
            "calculated_columns": self.all_calculated_columns,
            "calculation_items": self.all_calculation_items,
            "calculated_tables": self.all_calculated_tables,
            "rls": self.all_rls,
        }

        for key, func in object_map.items():
            for obj in func():
                if key == "calculated_tables":
                    # Calculated-table DAX lives on the partition source.
                    p = next(p for p in obj.Partitions)
                    name = obj.Name
                    expr = p.Source.Expression
                    table = obj.Name
                elif key == "calculation_items":
                    name = obj.Name
                    expr = obj.Expression
                    table = obj.Parent.Table.Name
                elif key == "rls":
                    name = obj.Role.Name
                    expr = obj.FilterExpression
                    table = obj.Table.Name
                else:
                    name = obj.Name
                    expr = obj.Expression
                    table = obj.Table.Name
                self._dax_formatting[key].append(
                    {
                        "name": name,
                        "expression": expr,
                        "table": table,
                    }
                )
        return

    # Route a single object to the queue matching its type so that close()
    # applies the formatted expression through the correct model accessor.
    # (Previously every branch appended to "measures".)
    if object.ObjectType == TOM.ObjectType.Measure:
        self._dax_formatting["measures"].append(
            {
                "name": object.Name,
                "expression": object.Expression,
                "table": object.Parent.Name,
            }
        )
    elif object.ObjectType == TOM.ObjectType.CalculatedColumn:
        self._dax_formatting["calculated_columns"].append(
            {
                "name": object.Name,
                "expression": object.Expression,
                "table": object.Parent.Name,
            }
        )
    elif object.ObjectType == TOM.ObjectType.CalculationItem:
        self._dax_formatting["calculation_items"].append(
            {
                "name": object.Name,
                "expression": object.Expression,
                # Mirrors the bulk path: close() resolves calculation items
                # via model.Tables[table].CalculationGroup.
                "table": object.Parent.Table.Name,
            }
        )
    elif object.ObjectType == TOM.ObjectType.CalculatedTable:
        # Same expression source as the bulk path (partition source).
        p = next(p for p in object.Partitions)
        self._dax_formatting["calculated_tables"].append(
            {
                "name": object.Name,
                "expression": p.Source.Expression,
                "table": object.Name,
            }
        )
    elif object.ObjectType == TOM.ObjectType.TablePermission:
        self._dax_formatting["rls"].append(
            {
                "name": object.Role.Name,
                "expression": object.FilterExpression,
                "table": object.Table.Name,
            }
        )
    else:
        raise ValueError(
            f"{icons.red_dot} The '{str(object.ObjectType)}' object type is not supported for DAX formatting."
        )
5320
def get_linguistic_schema(self, culture: str) -> dict:
    """
    Obtains the linguistic schema for a given culture.
    Parameters
    ----------
    culture : str
        The culture name.
    Returns
    -------
    dict
        The linguistic schema for the given culture.
    """

    culture_obj = self.model.Cultures[culture]

    # No metadata means no schema to return; inform the caller and bail out.
    if culture_obj.LinguisticMetadata is None:
        print(
            f"{icons.info} The '{culture}' culture does not have a linguistic schema."
        )
        return None

    return json.loads(culture_obj.LinguisticMetadata.Content)
5342
def _add_linguistic_schema(self, culture: str):
    """
    Generates baseline linguistic-schema entities (tables, columns, measures,
    hierarchies) for the given culture, without overwriting entries that
    already exist (e.g. authored synonyms added via ``set_synonym``).

    No-op when the culture has no ``LinguisticMetadata``.

    Parameters
    ----------
    culture : str
        The culture name.
    """

    import Microsoft.AnalysisServices.Tabular as TOM

    # TODO: if LinguisticMetadata is None
    # TODO: check if lower() is good enough
    # TODO: generate the 'Relationships' section ('in' vs 'has' phrasing,
    #       'SemanticSlots') — draft removed pending design.

    c = self.model.Cultures[culture]
    if c.LinguisticMetadata is not None:
        lm = json.loads(c.LinguisticMetadata.Content)

        def add_entity(entity, conceptual_entity, conceptual_property):
            # Non-destructive: keep any existing (possibly authored) entry.
            if entity in lm["Entities"]:
                return
            lm["Entities"][entity] = {
                "Definition": {
                    "Binding": {
                        "ConceptualEntity": conceptual_entity,
                        "ConceptualProperty": conceptual_property,
                    }
                },
                "State": "Generated",
                "Terms": [],
            }

        if "Entities" not in lm:
            lm["Entities"] = {}
        for t in self.model.Tables:
            t_lower = t.Name.lower()
            if t_lower not in lm["Entities"]:
                lm["Entities"][t_lower] = {
                    "Definition": {"Binding": {"ConceptualEntity": t.Name}},
                    "State": "Generated",
                    "Terms": [],
                }
            # 'col' (not 'c') so the culture object above is not shadowed.
            for col in t.Columns:
                if col.Type != TOM.ColumnType.RowNumber:
                    c_lower = f"{t_lower}.{col.Name.lower()}"
                    add_entity(c_lower, t.Name, col.Name)
            for m in t.Measures:
                m_lower = f"{t_lower}.{m.Name.lower()}"
                add_entity(m_lower, t.Name, m.Name)
            for h in t.Hierarchies:
                h_lower = f"{t_lower}.{h.Name.lower()}"
                add_entity(h_lower, t.Name, h.Name)

        self.model.Cultures[culture].LinguisticMetadata.Content = json.dumps(lm)
5428
@staticmethod
def _get_synonym_info(
    lm: dict,
    object: Union["TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy"],
    synonym_name: str,
):
    """
    Locates the linguistic-schema entity bound to the given TOM object.

    Parameters
    ----------
    lm : dict
        The parsed linguistic metadata content.
    object : TOM Object
        The table/column/measure/hierarchy to look up.
    synonym_name : str
        The synonym to check for within the entity's terms.

    Returns
    -------
    tuple
        (entity key or None, whether the synonym already exists in its terms).
    """

    import Microsoft.AnalysisServices.Tabular as TOM

    object_type = object.ObjectType
    obj = None
    syn_exists = False

    # Default must be a dict ({}), not a list — .items() is called on it.
    for key, v in lm.get("Entities", {}).items():
        binding = v.get("Definition", {}).get("Binding", {})
        t_name = binding.get("ConceptualEntity")
        o_name = binding.get("ConceptualProperty")

        if (
            object_type == TOM.ObjectType.Table
            and t_name == object.Name
            and o_name is None
        ) or (
            object_type
            in [
                TOM.ObjectType.Column,
                TOM.ObjectType.Measure,
                TOM.ObjectType.Hierarchy,
            ]
            and t_name == object.Parent.Name
            and o_name == object.Name
        ):
            obj = key
            # Terms is a list of single-key dicts: {synonym: properties}.
            terms = v.get("Terms", [])
            syn_exists = any(synonym_name in term for term in terms)
            break

    return obj, syn_exists
5468
def set_synonym(
    self,
    culture: str,
    object: Union["TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy"],
    synonym_name: str,
    weight: Optional[Decimal] = None,
):
    """
    Sets a synonym for a table/column/measure/hierarchy in the linguistic schema of the semantic model. This function is currently in preview.

    Parameters
    ----------
    culture : str
        The culture name for which the synonym is being set. Example: 'en-US'.
    object : TOM Object
        The TOM object for which the synonym is being set. This can be a table, column, measure, or hierarchy.
    synonym_name : str
        The name of the synonym to be set.
    weight : Decimal, default=None
        The weight of the synonym. If None, the default weight is used. The weight must be a Decimal value between 0 and 1.
    """

    import Microsoft.AnalysisServices.Tabular as TOM

    object_type = object.ObjectType

    if object_type not in [
        TOM.ObjectType.Table,
        TOM.ObjectType.Column,
        TOM.ObjectType.Measure,
        TOM.ObjectType.Hierarchy,
    ]:
        raise ValueError(
            f"{icons.red_dot} This function only supports adding synonyms for tables/columns/measures/hierarchies."
        )

    # Add base linguistic schema in case it does not yet exist
    self._add_linguistic_schema(culture=culture)

    # Extract linguistic metadata content
    lm = json.loads(self.model.Cultures[culture].LinguisticMetadata.Content)

    # Generate synonym dictionary
    _validate_weight(weight)
    now = datetime.now().isoformat(timespec="milliseconds") + "Z"
    syn_dict = {"Type": "Noun", "State": "Authored", "LastModified": now}
    if weight is not None:
        syn_dict["Weight"] = weight

    updated = False

    (obj, syn_exists) = self._get_synonym_info(
        lm=lm, object=object, synonym_name=synonym_name
    )

    entities = lm.get("Entities", {})

    def get_unique_entity_key(object, object_type, entities):
        # Derive a stable, collision-free entity key for the new entry.
        if object_type == TOM.ObjectType.Table:
            base_obj = object.Name.lower().replace(" ", "_")
        else:
            base_obj = f"{object.Parent.Name}.{object.Name}".lower().replace(
                " ", "_"
            )

        obj = base_obj
        counter = 1
        existing_keys = set(entities.keys())

        # Make sure the object name is unique
        while obj in existing_keys:
            obj = f"{base_obj}_{counter}"
            counter += 1

        return obj

    # Update linguistic metadata content
    if obj is None:
        # No entity bound to this object yet: create one with the synonym.
        obj = get_unique_entity_key(object, object_type, entities)
        lm["Entities"][obj] = {
            "Definition": {"Binding": {}},
            "State": "Authored",
            "Terms": [
                {synonym_name: syn_dict},
            ],
        }
        if object_type == TOM.ObjectType.Table:
            lm["Entities"][obj]["Definition"]["Binding"][
                "ConceptualEntity"
            ] = object.Name
        else:
            lm["Entities"][obj]["Definition"]["Binding"][
                "ConceptualEntity"
            ] = object.Parent.Name
            lm["Entities"][obj]["Definition"]["Binding"][
                "ConceptualProperty"
            ] = object.Name
        updated = True
    elif not syn_exists:
        lm["Entities"][obj]["Terms"].append({synonym_name: syn_dict})
        updated = True
    else:
        # Terms is a list of single-key dicts ({synonym: properties}), so
        # match by key membership and replace the properties in place.
        # (Comparing the dict itself to the synonym string never matched.)
        for term in lm["Entities"][obj]["Terms"]:
            if synonym_name in term:
                term[synonym_name] = syn_dict
                updated = True

    if "State" in lm["Entities"][obj]:
        del lm["Entities"][obj]["State"]

    if updated:
        self.model.Cultures[culture].LinguisticMetadata.Content = json.dumps(
            lm, indent=4
        )
        if object_type == TOM.ObjectType.Table:
            print(
                f"{icons.green_dot} The '{synonym_name}' synonym was set for the '{object.Name}' table."
            )
        else:
            print(
                f"{icons.green_dot} The '{synonym_name}' synonym was set for the '{object.Parent.Name}'[{object.Name}] column."
            )
5592
def delete_synonym(
    self,
    culture: str,
    object: Union["TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy"],
    synonym_name: str,
):
    """
    Deletes a synonym for a table/column/measure/hierarchy in the linguistic schema of the semantic model. This function is currently in preview.

    The synonym is not removed from the schema; its term is marked with
    ``"State": "Deleted"``.

    Parameters
    ----------
    culture : str
        The culture name for which the synonym is being deleted. Example: 'en-US'.
    object : TOM Object
        The TOM object for which the synonym is being deleted. This can be a table, column, measure, or hierarchy.
    synonym_name : str
        The name of the synonym to be deleted.
    """

    import Microsoft.AnalysisServices.Tabular as TOM

    if not any(c.Name == culture for c in self.model.Cultures):
        raise ValueError(
            f"{icons.red_dot} The '{culture}' culture does not exist within the semantic model."
        )

    supported_types = (
        TOM.ObjectType.Table,
        TOM.ObjectType.Column,
        TOM.ObjectType.Measure,
        TOM.ObjectType.Hierarchy,
    )
    if object.ObjectType not in supported_types:
        raise ValueError(
            f"{icons.red_dot} This function only supports tables/columns/measures/hierarchies."
        )

    lm = json.loads(self.model.Cultures[culture].LinguisticMetadata.Content)

    if "Entities" not in lm:
        print(
            f"{icons.warning} There is no linguistic schema for the '{culture}' culture."
        )
        return

    obj, syn_exists = self._get_synonym_info(
        lm=lm, object=object, synonym_name=synonym_name
    )

    if obj is None or not syn_exists:
        print(
            f"{icons.info} The '{synonym_name}' synonym does not exist for the '{object.Name}' object."
        )
        return

    # Mark the first matching term as deleted (terms are single-key dicts).
    for term in lm["Entities"][obj]["Terms"]:
        if synonym_name in term:
            term[synonym_name].update({"State": "Deleted"})
            break

    self.model.Cultures[culture].LinguisticMetadata.Content = json.dumps(
        lm, indent=4
    )
    print(
        f"{icons.green_dot} The '{synonym_name}' synonym was marked as status 'Deleted' for the '{object.Name}' object."
    )
5663
def _lock_linguistic_schema(self, culture: str):
    """
    Sets ``DynamicImprovement.Schema`` to ``None`` in the culture's
    linguistic metadata. No-op when the culture has no metadata.
    NOTE(review): presumably this disables automatic schema improvements —
    confirm against the linguistic schema specification.
    """

    culture_obj = self.model.Cultures[culture]
    if culture_obj.LinguisticMetadata is None:
        return

    lm = json.loads(culture_obj.LinguisticMetadata.Content)
    lm.setdefault("DynamicImprovement", {})["Schema"] = None
    culture_obj.LinguisticMetadata.Content = json.dumps(lm, indent=4)
5674
def _unlock_linguistic_schema(self, culture: str):
    """
    Removes ``DynamicImprovement.Schema`` from the culture's linguistic
    metadata (the counterpart of ``_lock_linguistic_schema``). No-op when
    the culture has no metadata.
    """

    c = self.model.Cultures[culture]
    if c.LinguisticMetadata is not None:
        lm = json.loads(c.LinguisticMetadata.Content)
        if "DynamicImprovement" in lm:
            # pop() tolerates an absent "Schema" key (bare `del` raised
            # KeyError when the schema was never locked).
            lm["DynamicImprovement"].pop("Schema", None)

        c.LinguisticMetadata.Content = json.dumps(lm, indent=4)
5684
def _export_linguistic_schema(self, culture: str, file_path: str):
    """
    Saves the culture's linguistic schema as a .json file in the Files area
    of the lakehouse attached to the notebook.

    Raises
    ------
    ValueError
        If no lakehouse is attached, or the culture does not exist.
    """

    if not lakehouse_attached():
        raise ValueError(
            f"{icons.red_dot} A lakehouse must be attached to the notebook in order to export a linguistic schema."
        )

    if not any(c.Name == culture for c in self.model.Cultures):
        raise ValueError(
            f"{icons.red_dot} The '{culture}' culture does not exist within the semantic model."
        )

    # Normalize the target name to a .json extension.
    file_ext = ".json"
    if not file_path.endswith(file_ext):
        file_path = f"{file_path}{file_ext}"

    # Culture existence was verified above, so index it directly.
    schema = json.loads(self.model.Cultures[culture].LinguisticMetadata.Content)
    target = os.path.join("/lakehouse/default/Files", file_path)
    with open(target, "w") as json_file:
        json.dump(schema, json_file, indent=4)

    print(
        f"{icons.green_dot} The linguistic schema for the '{culture}' culture was saved as the '{file_path}' file within the lakehouse attached to this notebook."
    )
5712
def _import_linguistic_schema(self, file_path: str):
    """
    Loads a linguistic schema from a .json file and applies it to the
    matching culture of the semantic model (the culture is taken from the
    file's ``Language`` field).

    Raises
    ------
    ValueError
        If the path is not a .json file, the file is missing required keys,
        or its culture does not exist in the model.
    """

    if not file_path.endswith(".json"):
        raise ValueError(f"{icons.red_dot} The 'file_path' must be a .json file.")

    with open(file_path, "r") as json_file:
        schema_file = json.load(json_file)

    # Validate structure
    required_keys = ("Version", "Language", "Entities", "Relationships")
    missing = [key for key in required_keys if key not in schema_file]
    if missing:
        raise ValueError(
            f"{icons.red_dot} The 'schema_file' is not in the proper format."
        )

    culture_name = schema_file["Language"]

    # Validate culture
    if not any(c.Name == culture_name for c in self.model.Cultures):
        raise ValueError(
            f"{icons.red_dot} The culture of the schema_file is not a valid culture within the semantic model."
        )

    self.model.Cultures[culture_name].LinguisticMetadata.Content = json.dumps(
        schema_file, indent=4
    )
5141
5739
  def close(self):
5142
5740
 
5741
+ # DAX Formatting
5742
+ from sempy_labs._daxformatter import _format_dax
5743
+
5744
+ def _process_dax_objects(object_type, model_accessor=None):
5745
+ items = self._dax_formatting.get(object_type, [])
5746
+ if not items:
5747
+ return False
5748
+
5749
+ # Extract and format expressions
5750
+ expressions = [item["expression"] for item in items]
5751
+ metadata = [
5752
+ {"name": item["name"], "table": item["table"], "type": object_type}
5753
+ for item in items
5754
+ ]
5755
+
5756
+ formatted_expressions = _format_dax(expressions, metadata=metadata)
5757
+
5758
+ # Update the expressions in the original structure
5759
+ for item, formatted in zip(items, formatted_expressions):
5760
+ item["expression"] = formatted
5761
+
5762
+ # Apply updated expressions to the model
5763
+ for item in items:
5764
+ table_name = (
5765
+ item["table"]
5766
+ if object_type != "calculated_tables"
5767
+ else item["name"]
5768
+ )
5769
+ name = item["name"]
5770
+ expression = item["expression"]
5771
+
5772
+ if object_type == "calculated_tables":
5773
+ t = self.model.Tables[table_name]
5774
+ p = next(p for p in t.Partitions)
5775
+ p.Source.Expression = expression
5776
+ elif object_type == "rls":
5777
+ self.model.Roles[name].TablePermissions[
5778
+ table_name
5779
+ ].FilterExpression = expression
5780
+ elif object_type == "calculation_items":
5781
+ self.model.Tables[table_name].CalculationGroup.CalculationItems[
5782
+ name
5783
+ ].Expression = expression
5784
+ else:
5785
+ getattr(self.model.Tables[table_name], model_accessor)[
5786
+ name
5787
+ ].Expression = expression
5788
+ return True
5789
+
5790
+ # Use the helper for each object type
5791
+ a = _process_dax_objects("measures", "Measures")
5792
+ b = _process_dax_objects("calculated_columns", "Columns")
5793
+ c = _process_dax_objects("calculation_items")
5794
+ d = _process_dax_objects("calculated_tables")
5795
+ e = _process_dax_objects("rls")
5796
+ if any([a, b, c, d, e]) and not self._readonly:
5797
+ from IPython.display import display, HTML
5798
+
5799
+ html = """
5800
+ <span style="font-family: Segoe UI, Arial, sans-serif; color: #cccccc;">
5801
+ CODE BEAUTIFIED WITH
5802
+ </span>
5803
+ <a href="https://www.daxformatter.com" target="_blank" style="font-family: Segoe UI, Arial, sans-serif; color: #ff5a5a; font-weight: bold; text-decoration: none;">
5804
+ DAX FORMATTER
5805
+ </a>
5806
+ """
5807
+
5808
+ display(HTML(html))
5809
+
5143
5810
  if not self._readonly and self.model is not None:
5144
5811
 
5145
5812
  import Microsoft.AnalysisServices.Tabular as TOM