semantic-link-labs 0.5.0-py3-none-any.whl → 0.6.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (54)
  1. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/METADATA +2 -2
  2. semantic_link_labs-0.6.0.dist-info/RECORD +54 -0
  3. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +19 -13
  5. sempy_labs/_ai.py +43 -24
  6. sempy_labs/_clear_cache.py +4 -5
  7. sempy_labs/_connections.py +77 -70
  8. sempy_labs/_dax.py +7 -9
  9. sempy_labs/_generate_semantic_model.py +55 -44
  10. sempy_labs/_helper_functions.py +13 -6
  11. sempy_labs/_icons.py +14 -0
  12. sempy_labs/_list_functions.py +491 -304
  13. sempy_labs/_model_auto_build.py +4 -3
  14. sempy_labs/_model_bpa.py +131 -1118
  15. sempy_labs/_model_bpa_rules.py +831 -0
  16. sempy_labs/_model_dependencies.py +14 -12
  17. sempy_labs/_one_lake_integration.py +11 -5
  18. sempy_labs/_query_scale_out.py +89 -81
  19. sempy_labs/_refresh_semantic_model.py +16 -10
  20. sempy_labs/_translations.py +213 -287
  21. sempy_labs/_vertipaq.py +53 -37
  22. sempy_labs/directlake/__init__.py +2 -0
  23. sempy_labs/directlake/_directlake_schema_compare.py +12 -5
  24. sempy_labs/directlake/_directlake_schema_sync.py +13 -19
  25. sempy_labs/directlake/_fallback.py +5 -3
  26. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  27. sempy_labs/directlake/_get_shared_expression.py +4 -2
  28. sempy_labs/directlake/_guardrails.py +3 -3
  29. sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
  30. sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
  31. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
  32. sempy_labs/directlake/_update_directlake_partition_entity.py +132 -9
  33. sempy_labs/directlake/_warm_cache.py +6 -3
  34. sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
  35. sempy_labs/lakehouse/_get_lakehouse_tables.py +5 -3
  36. sempy_labs/lakehouse/_lakehouse.py +2 -1
  37. sempy_labs/lakehouse/_shortcuts.py +19 -12
  38. sempy_labs/migration/__init__.py +1 -1
  39. sempy_labs/migration/_create_pqt_file.py +21 -15
  40. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +16 -13
  41. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +17 -18
  42. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +43 -40
  43. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -14
  44. sempy_labs/migration/_migration_validation.py +2 -2
  45. sempy_labs/migration/_refresh_calc_tables.py +8 -5
  46. sempy_labs/report/__init__.py +2 -2
  47. sempy_labs/report/_generate_report.py +10 -5
  48. sempy_labs/report/_report_functions.py +67 -29
  49. sempy_labs/report/_report_rebind.py +9 -8
  50. sempy_labs/tom/__init__.py +1 -4
  51. sempy_labs/tom/_model.py +555 -152
  52. semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
  53. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/LICENSE +0 -0
  54. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/top_level.txt +0 -0
sempy_labs/tom/_model.py CHANGED
@@ -21,7 +21,8 @@ class TOMWrapper:
21
21
  """
22
22
  Convenience wrapper around the TOM object model for a semantic model. Always use the connect_semantic_model function to make sure the TOM object is initialized correctly.
23
23
 
24
- `XMLA read/write endpoints <https://learn.microsoft.com/power-bi/enterprise/service-premium-connect-tools#to-enable-read-write-for-a-premium-capacity>`_ must be enabled if setting the readonly parameter to False.
24
+ `XMLA read/write endpoints <https://learn.microsoft.com/power-bi/enterprise/service-premium-connect-tools#to-enable-read-write-for-a-premium-capacity>`_ must
25
+ be enabled if setting the readonly parameter to False.
25
26
  """
26
27
 
27
28
  _dataset: str
@@ -93,7 +94,9 @@ class TOMWrapper:
93
94
  import Microsoft.AnalysisServices.Tabular as TOM
94
95
 
95
96
  for t in self.model.Tables:
96
- if any(p.SourceType == TOM.PartitionSourceType.Calculated for p in t.Partitions):
97
+ if any(
98
+ p.SourceType == TOM.PartitionSourceType.Calculated for p in t.Partitions
99
+ ):
97
100
  yield t
98
101
 
99
102
  def all_calculation_groups(self):
@@ -500,7 +503,8 @@ class TOMWrapper:
500
503
  description: Optional[str] = None,
501
504
  ):
502
505
  """
503
- Adds a `calculation item <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.calculationitem?view=analysisservices-dotnet>`_ to a `calculation group <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.calculationgroup?view=analysisservices-dotnet>`_ within a semantic model.
506
+ Adds a `calculation item <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.calculationitem?view=analysisservices-dotnet>`_ to
507
+ a `calculation group <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.calculationgroup?view=analysisservices-dotnet>`_ within a semantic model.
504
508
 
505
509
  Parameters
506
510
  ----------
@@ -582,11 +586,15 @@ class TOMWrapper:
582
586
  tp.Table = self.model.Tables[table_name]
583
587
  tp.FilterExpression = filter_expression
584
588
 
585
- try:
589
+ if any(
590
+ t.Name == table_name and r.Name == role_name
591
+ for r in self.model.Roles
592
+ for t in r.TablePermissions
593
+ ):
586
594
  self.model.Roles[role_name].TablePermissions[
587
595
  table_name
588
596
  ].FilterExpression = filter_expression
589
- except:
597
+ else:
590
598
  self.model.Roles[role_name].TablePermissions.Add(tp)
591
599
 
592
600
  def set_ols(
@@ -613,17 +621,22 @@ class TOMWrapper:
613
621
  permission = permission.capitalize()
614
622
 
615
623
  if permission not in ["Read", "None", "Default"]:
616
- print(f"ERROR! Invalid 'permission' value.")
617
- return
624
+ raise ValueError(f"{icons.red_dot} Invalid 'permission' value.")
618
625
 
619
626
  cp = TOM.ColumnPermission()
620
627
  cp.Column = self.model.Tables[table_name].Columns[column_name]
621
628
  cp.MetadataPermission = System.Enum.Parse(TOM.MetadataPermission, permission)
622
- try:
629
+
630
+ if any(
631
+ c.Name == column_name and t.Name == table_name and r.Name == role_name
632
+ for r in self.model.Roles
633
+ for t in r.TablePermissions
634
+ for c in t.ColumnPermissions
635
+ ):
623
636
  self.model.Roles[role_name].TablePermissions[table_name].ColumnPermissions[
624
637
  column_name
625
638
  ].MetadataPermission = System.Enum.Parse(TOM.MetadataPermission, permission)
626
- except:
639
+ else:
627
640
  self.model.Roles[role_name].TablePermissions[
628
641
  table_name
629
642
  ].ColumnPermissions.Add(cp)
@@ -658,16 +671,22 @@ class TOMWrapper:
658
671
  import Microsoft.AnalysisServices.Tabular as TOM
659
672
 
660
673
  if isinstance(columns, str):
661
- raise ValueError(f"{icons.red_dot} The 'levels' parameter must be a list. For example: ['Continent', 'Country', 'City']")
662
-
674
+ raise ValueError(
675
+ f"{icons.red_dot} The 'levels' parameter must be a list. For example: ['Continent', 'Country', 'City']"
676
+ )
677
+
663
678
  if len(columns) == 1:
664
- raise ValueError(f"{icons.red_dot} There must be at least 2 levels in order to create a hierarchy.")
679
+ raise ValueError(
680
+ f"{icons.red_dot} There must be at least 2 levels in order to create a hierarchy."
681
+ )
665
682
 
666
683
  if levels is None:
667
684
  levels = columns
668
685
 
669
686
  if len(columns) != len(levels):
670
- raise ValueError(f"{icons.red_dot} If specifying level names, you must specify a level for each column.")
687
+ raise ValueError(
688
+ f"{icons.red_dot} If specifying level names, you must specify a level for each column."
689
+ )
671
690
 
672
691
  obj = TOM.Hierarchy()
673
692
  obj.Name = hierarchy_name
@@ -858,10 +877,8 @@ class TOMWrapper:
858
877
  cul = TOM.Culture()
859
878
  cul.Name = language
860
879
 
861
- try:
880
+ if not any(c.Name == language for c in self.model.Cultures):
862
881
  self.model.Cultures.Add(cul)
863
- except:
864
- pass
865
882
 
866
883
  def add_perspective(self, perspective_name: str):
867
884
  """
@@ -997,7 +1014,9 @@ class TOMWrapper:
997
1014
  import System
998
1015
 
999
1016
  if base_column is not None and base_table is None:
1000
- raise ValueError(f"{icons.red_dot} If you specify the base table you must also specify the base column")
1017
+ raise ValueError(
1018
+ f"{icons.red_dot} If you specify the base table you must also specify the base column"
1019
+ )
1001
1020
 
1002
1021
  summarization_type = (
1003
1022
  summarization_type.replace(" ", "")
@@ -1007,7 +1026,9 @@ class TOMWrapper:
1007
1026
 
1008
1027
  summarizationTypes = ["Sum", "GroupBy", "Count", "Min", "Max"]
1009
1028
  if summarization_type not in summarizationTypes:
1010
- raise ValueError(f"{icons.red_dot} The 'summarization_type' parameter must be one of the following valuse: {summarizationTypes}.")
1029
+ raise ValueError(
1030
+ f"{icons.red_dot} The 'summarization_type' parameter must be one of the following valuse: {summarizationTypes}."
1031
+ )
1011
1032
 
1012
1033
  ao = TOM.AlternateOf()
1013
1034
  ao.Summarization = System.Enum.Parse(TOM.SummarizationType, summarization_type)
@@ -1085,9 +1106,9 @@ class TOMWrapper:
1085
1106
  ann.Name = name
1086
1107
  ann.Value = value
1087
1108
 
1088
- try:
1109
+ if any(a.Name == name for a in object.Annotations):
1089
1110
  object.Annotations[name].Value = value
1090
- except:
1111
+ else:
1091
1112
  object.Annotations.Add(ann)
1092
1113
 
1093
1114
  def get_annotation_value(self, object, name: str):
@@ -1106,8 +1127,12 @@ class TOMWrapper:
1106
1127
  str
1107
1128
  The annotation value.
1108
1129
  """
1130
+ if any(a.Name == name for a in object.Annotations):
1131
+ value = object.Annotations[name].Value
1132
+ else:
1133
+ value = None
1109
1134
 
1110
- return object.Annotations[name].Value
1135
+ return value
1111
1136
 
1112
1137
  def remove_annotation(self, object, name: str):
1113
1138
  """
@@ -1185,9 +1210,9 @@ class TOMWrapper:
1185
1210
  ep.Name = name
1186
1211
  ep.Value = value
1187
1212
 
1188
- try:
1213
+ if any(a.Name == name for a in object.Annotations):
1189
1214
  object.ExtendedProperties[name].Value = value
1190
- except:
1215
+ else:
1191
1216
  object.ExtendedProperties.Add(ep)
1192
1217
 
1193
1218
  def get_extended_property_value(self, object, name: str):
@@ -1206,8 +1231,12 @@ class TOMWrapper:
1206
1231
  str
1207
1232
  The extended property value.
1208
1233
  """
1234
+ if any(a.Name == name for a in object.ExtendedProperties):
1235
+ value = object.ExtendedProperties[name].Value
1236
+ else:
1237
+ value = None
1209
1238
 
1210
- return object.ExtendedProperties[name].Value
1239
+ return value
1211
1240
 
1212
1241
  def remove_extended_property(self, object, name: str):
1213
1242
  """
@@ -1266,7 +1295,9 @@ class TOMWrapper:
1266
1295
  objectType = object.ObjectType
1267
1296
 
1268
1297
  if objectType not in validObjects:
1269
- raise ValueError(f"{icons.red_dot} Only the following object types are valid for perspectives: {validObjects}.")
1298
+ raise ValueError(
1299
+ f"{icons.red_dot} Only the following object types are valid for perspectives: {validObjects}."
1300
+ )
1270
1301
 
1271
1302
  object.Model.Perspectives[perspective_name]
1272
1303
 
@@ -1288,7 +1319,7 @@ class TOMWrapper:
1288
1319
  object.Parent.Name
1289
1320
  ].PerspectiveHierarchies[object.Name]
1290
1321
  return True
1291
- except:
1322
+ except Exception:
1292
1323
  return False
1293
1324
 
1294
1325
  def add_to_perspective(
@@ -1317,14 +1348,17 @@ class TOMWrapper:
1317
1348
  objectType = object.ObjectType
1318
1349
 
1319
1350
  if objectType not in validObjects:
1320
- raise ValueError(f"{icons.red_dot} Only the following object types are valid for perspectives: {validObjects}.")
1351
+ raise ValueError(
1352
+ f"{icons.red_dot} Only the following object types are valid for perspectives: {validObjects}."
1353
+ )
1321
1354
 
1322
- try:
1355
+ if any(p.Name == perspective_name for p in self.model.Perspectives):
1323
1356
  object.Model.Perspectives[perspective_name]
1324
- except:
1325
- raise ValueError(f"{icons.red_dot} The '{perspective_name}' perspective does not exist.")
1357
+ else:
1358
+ raise ValueError(
1359
+ f"{icons.red_dot} The '{perspective_name}' perspective does not exist."
1360
+ )
1326
1361
 
1327
- # try:
1328
1362
  if objectType == TOM.ObjectType.Table:
1329
1363
  pt = TOM.PerspectiveTable()
1330
1364
  pt.Table = object
@@ -1347,8 +1381,6 @@ class TOMWrapper:
1347
1381
  object.Model.Perspectives[perspective_name].PerspectiveTables[
1348
1382
  object.Parent.Name
1349
1383
  ].PerspectiveHierarchies.Add(ph)
1350
- # except:
1351
- # pass
1352
1384
 
1353
1385
  def remove_from_perspective(
1354
1386
  self,
@@ -1376,14 +1408,15 @@ class TOMWrapper:
1376
1408
  objectType = object.ObjectType
1377
1409
 
1378
1410
  if objectType not in validObjects:
1379
- raise ValueError(f"{icons.red_dot} Only the following object types are valid for perspectives: {validObjects}.")
1411
+ raise ValueError(
1412
+ f"{icons.red_dot} Only the following object types are valid for perspectives: {validObjects}."
1413
+ )
1380
1414
 
1381
- try:
1382
- object.Model.Perspectives[perspective_name]
1383
- except:
1384
- raise ValueError(f"{icons.red_dot} The '{perspective_name}' perspective does not exist.")
1415
+ if not any(p.Name == perspective_name for p in self.model.Perspectives):
1416
+ raise ValueError(
1417
+ f"{icons.red_dot} The '{perspective_name}' perspective does not exist."
1418
+ )
1385
1419
 
1386
- # try:
1387
1420
  if objectType == TOM.ObjectType.Table:
1388
1421
  pt = object.Model.Perspectives[perspective_name].PerspectiveTables[
1389
1422
  object.Name
@@ -1416,12 +1449,12 @@ class TOMWrapper:
1416
1449
  object.Model.Perspectives[perspective_name].PerspectiveTables[
1417
1450
  object.Parent.Name
1418
1451
  ].PerspectiveHierarchies.Remove(ph)
1419
- # except:
1420
- # pass
1421
1452
 
1422
1453
  def set_translation(
1423
1454
  self,
1424
- object: Union["TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy"],
1455
+ object: Union[
1456
+ "TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy", "TOM.Level"
1457
+ ],
1425
1458
  language: str,
1426
1459
  property: str,
1427
1460
  value: str,
@@ -1451,10 +1484,13 @@ class TOMWrapper:
1451
1484
  TOM.ObjectType.Column,
1452
1485
  TOM.ObjectType.Measure,
1453
1486
  TOM.ObjectType.Hierarchy,
1454
- ] # , 'Level'
1487
+ TOM.ObjectType.Level,
1488
+ ]
1455
1489
 
1456
1490
  if object.ObjectType not in validObjects:
1457
- raise ValueError(f"{icons.red_dot} Translations can only be set to {validObjects}.")
1491
+ raise ValueError(
1492
+ f"{icons.red_dot} Translations can only be set to {validObjects}."
1493
+ )
1458
1494
 
1459
1495
  mapping = {
1460
1496
  "Name": TOM.TranslatedProperty.Caption,
@@ -1463,21 +1499,38 @@ class TOMWrapper:
1463
1499
  }
1464
1500
 
1465
1501
  prop = mapping.get(property)
1466
- if prop == None:
1467
- raise ValueError(f"{icons.red_dot} Invalid property value. Please choose from the following: ['Name', 'Description', Display Folder].")
1502
+ if prop is None:
1503
+ raise ValueError(
1504
+ f"{icons.red_dot} Invalid property value. Please choose from the following: ['Name', 'Description', Display Folder]."
1505
+ )
1468
1506
 
1469
- try:
1470
- object.Model.Cultures[language]
1471
- except:
1472
- raise ValueError(f"{icons.red_dot} The '{language}' translation language does not exist in the semantic model.")
1507
+ if not any(c.Name == language for c in self.model.Cultures):
1508
+ raise ValueError(
1509
+ f"{icons.red_dot} The '{language}' translation language does not exist in the semantic model."
1510
+ )
1473
1511
 
1474
1512
  object.Model.Cultures[language].ObjectTranslations.SetTranslation(
1475
1513
  object, prop, value
1476
1514
  )
1477
1515
 
1516
+ if object.ObjectType in [TOM.ObjectType.Table, TOM.ObjectType.Measure]:
1517
+ print(
1518
+ f"{icons.green_dot} The {property} property for the '{object.Name}' {str(object.ObjectType).lower()} has been translated into '{language}' as '{value}'."
1519
+ )
1520
+ elif object.ObjectType in [
1521
+ TOM.ObjectType.Column,
1522
+ TOM.ObjectType.Hierarchy,
1523
+ TOM.ObjectType.Level,
1524
+ ]:
1525
+ print(
1526
+ f"{icons.green_dot} The {property} property for the '{object.Parent.Name}'[{object.Name}] {str(object.ObjectType).lower()} has been translated into '{language}' as '{value}'."
1527
+ )
1528
+
1478
1529
  def remove_translation(
1479
1530
  self,
1480
- object: Union["TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy"],
1531
+ object: Union[
1532
+ "TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy", "TOM.Level"
1533
+ ],
1481
1534
  language: str,
1482
1535
  ):
1483
1536
  """
@@ -1515,7 +1568,7 @@ class TOMWrapper:
1515
1568
  for lang in object.Model.Cultures:
1516
1569
  try:
1517
1570
  self.remove_translation(object=object, language=lang.Name)
1518
- except:
1571
+ except Exception:
1519
1572
  pass
1520
1573
  if objType in ["Table", "Column", "Measure", "Hierarchy"]:
1521
1574
  for persp in object.Model.Perspectives:
@@ -1523,7 +1576,7 @@ class TOMWrapper:
1523
1576
  self.remove_from_perspective(
1524
1577
  object=object, perspective_name=persp.Name
1525
1578
  )
1526
- except:
1579
+ except Exception:
1527
1580
  pass
1528
1581
 
1529
1582
  if objType == TOM.ObjectType.Column:
@@ -1631,12 +1684,12 @@ class TOMWrapper:
1631
1684
  objType = column.ObjectType
1632
1685
 
1633
1686
  if objType == TOM.ObjectType.Column:
1634
- for l in self.all_levels():
1687
+ for lev in self.all_levels():
1635
1688
  if (
1636
- l.Parent.Table.Name == column.Parent.Name
1637
- and l.Column.Name == column.Name
1689
+ lev.Parent.Table.Name == column.Parent.Name
1690
+ and lev.Column.Name == column.Name
1638
1691
  ):
1639
- yield l.Parent
1692
+ yield lev.Parent
1640
1693
 
1641
1694
  def used_in_sort_by(self, column: "TOM.Column"):
1642
1695
  """
@@ -1829,7 +1882,7 @@ class TOMWrapper:
1829
1882
  if m.Name in meas:
1830
1883
  yield m
1831
1884
 
1832
- def hybrid_tables(self):
1885
+ def all_hybrid_tables(self):
1833
1886
  """
1834
1887
  Outputs the `hybrid tables <https://learn.microsoft.com/power-bi/connect-data/service-dataset-modes-understand#hybrid-tables>`_ within a semantic model.
1835
1888
 
@@ -1848,7 +1901,7 @@ class TOMWrapper:
1848
1901
  if any(p.Mode == TOM.ModeType.DirectQuery for p in t.Partitions):
1849
1902
  yield t
1850
1903
 
1851
- def date_tables(self):
1904
+ def all_date_tables(self):
1852
1905
  """
1853
1906
  Outputs the tables which are marked as `date tables <https://learn.microsoft.com/power-bi/transform-model/desktop-date-tables>`_ within a semantic model.
1854
1907
 
@@ -1915,7 +1968,11 @@ class TOMWrapper:
1915
1968
  """
1916
1969
  import Microsoft.AnalysisServices.Tabular as TOM
1917
1970
 
1918
- return any(c.IsKey and c.DataType == TOM.DataType.DateTime for c in self.all_columns() if c.Parent.Name == table_name and c.Parent.DataCategory == 'Time')
1971
+ return any(
1972
+ c.IsKey and c.DataType == TOM.DataType.DateTime
1973
+ for c in self.all_columns()
1974
+ if c.Parent.Name == table_name and c.Parent.DataCategory == "Time"
1975
+ )
1919
1976
 
1920
1977
  def mark_as_date_table(self, table_name: str, column_name: str):
1921
1978
  """
@@ -1933,8 +1990,10 @@ class TOMWrapper:
1933
1990
  t = self.model.Tables[table_name]
1934
1991
  c = t.Columns[column_name]
1935
1992
  if c.DataType != TOM.DataType.DateTime:
1936
- raise ValueError(f"{icons.red_dot} The column specified in the 'column_name' parameter in this function must be of DateTime data type.")
1937
-
1993
+ raise ValueError(
1994
+ f"{icons.red_dot} The column specified in the 'column_name' parameter in this function must be of DateTime data type."
1995
+ )
1996
+
1938
1997
  daxQuery = f"""
1939
1998
  define measure '{table_name}'[test] =
1940
1999
  var mn = MIN('{table_name}'[{column_name}])
@@ -1953,7 +2012,9 @@ class TOMWrapper:
1953
2012
  )
1954
2013
  value = df["1"].iloc[0]
1955
2014
  if value != "1":
1956
- raise ValueError(f"{icons.red_dot} The '{column_name}' within the '{table_name}' table does not contain contiguous date values.")
2015
+ raise ValueError(
2016
+ f"{icons.red_dot} The '{column_name}' within the '{table_name}' table does not contain contiguous date values."
2017
+ )
1957
2018
 
1958
2019
  # Mark as a date table
1959
2020
  t.DataCategory = "Time"
@@ -2007,7 +2068,7 @@ class TOMWrapper:
2007
2068
  -------
2008
2069
  bool
2009
2070
  Indicates if the semantic model has a hybrid table.
2010
- """
2071
+ """
2011
2072
 
2012
2073
  return any(self.is_hybrid_table(table_name=t.Name) for t in self.model.Tables)
2013
2074
 
@@ -2148,12 +2209,19 @@ class TOMWrapper:
2148
2209
  # https://github.com/m-kovalsky/Tabular/blob/master/KPI%20Graphics.md
2149
2210
 
2150
2211
  if measure_name == target:
2151
- raise ValueError(f"{icons.red_dot} The 'target' parameter cannot be the same measure as the 'measure_name' parameter.")
2212
+ raise ValueError(
2213
+ f"{icons.red_dot} The 'target' parameter cannot be the same measure as the 'measure_name' parameter."
2214
+ )
2152
2215
 
2153
2216
  if status_graphic is None:
2154
2217
  status_graphic = "Three Circles Colored"
2155
2218
 
2156
- valid_status_types = ["Linear", "LinearReversed", "Centered", "CenteredReversed"]
2219
+ valid_status_types = [
2220
+ "Linear",
2221
+ "LinearReversed",
2222
+ "Centered",
2223
+ "CenteredReversed",
2224
+ ]
2157
2225
  status_type = status_type
2158
2226
  if status_type is None:
2159
2227
  status_type = "Linear"
@@ -2161,31 +2229,47 @@ class TOMWrapper:
2161
2229
  status_type = status_type.title().replace(" ", "")
2162
2230
 
2163
2231
  if status_type not in valid_status_types:
2164
- raise ValueError(f"{icons.red_dot} '{status_type}' is an invalid status_type. Please choose from these options: {valid_status_types}.")
2232
+ raise ValueError(
2233
+ f"{icons.red_dot} '{status_type}' is an invalid status_type. Please choose from these options: {valid_status_types}."
2234
+ )
2165
2235
 
2166
2236
  if status_type in ["Linear", "LinearReversed"]:
2167
2237
  if upper_bound is not None or lower_mid_bound is not None:
2168
- raise ValueError(f"{icons.red_dot} The 'upper_mid_bound' and 'lower_mid_bound' parameters are not used in the 'Linear' and 'LinearReversed' status types. Make sure these parameters are set to None.")
2238
+ raise ValueError(
2239
+ f"{icons.red_dot} The 'upper_mid_bound' and 'lower_mid_bound' parameters are not used in the 'Linear' and 'LinearReversed' status types. Make sure these parameters are set to None."
2240
+ )
2169
2241
 
2170
2242
  elif upper_bound <= lower_bound:
2171
- raise ValueError(f"{icons.red_dot} The upper_bound must be greater than the lower_bound.")
2243
+ raise ValueError(
2244
+ f"{icons.red_dot} The upper_bound must be greater than the lower_bound."
2245
+ )
2172
2246
 
2173
2247
  if status_type in ["Centered", "CenteredReversed"]:
2174
2248
  if upper_mid_bound is None or lower_mid_bound is None:
2175
- raise ValueError(f"{icons.red_dot} The 'upper_mid_bound' and 'lower_mid_bound' parameters are necessary in the 'Centered' and 'CenteredReversed' status types.")
2249
+ raise ValueError(
2250
+ f"{icons.red_dot} The 'upper_mid_bound' and 'lower_mid_bound' parameters are necessary in the 'Centered' and 'CenteredReversed' status types."
2251
+ )
2176
2252
  elif upper_bound <= upper_mid_bound:
2177
- raise ValueError(f"{icons.red_dot} The upper_bound must be greater than the upper_mid_bound.")
2253
+ raise ValueError(
2254
+ f"{icons.red_dot} The upper_bound must be greater than the upper_mid_bound."
2255
+ )
2178
2256
  elif upper_mid_bound <= lower_mid_bound:
2179
- raise ValueError(f"{icons.red_dot} The upper_mid_bound must be greater than the lower_mid_bound.")
2257
+ raise ValueError(
2258
+ f"{icons.red_dot} The upper_mid_bound must be greater than the lower_mid_bound."
2259
+ )
2180
2260
  elif lower_mid_bound <= lower_bound:
2181
- raise ValueError(f"{icons.red_dot} The lower_mid_bound must be greater than the lower_bound.")
2261
+ raise ValueError(
2262
+ f"{icons.red_dot} The lower_mid_bound must be greater than the lower_bound."
2263
+ )
2182
2264
 
2183
2265
  try:
2184
2266
  table_name = next(
2185
2267
  m.Parent.Name for m in self.all_measures() if m.Name == measure_name
2186
2268
  )
2187
- except:
2188
- raise ValueError(f"{icons.red_dot} The '{measure_name}' measure does not exist in the '{self._dataset}' semantic model within the '{self._workspace}'.")
2269
+ except Exception:
2270
+ raise ValueError(
2271
+ f"{icons.red_dot} The '{measure_name}' measure does not exist in the '{self._dataset}' semantic model within the '{self._workspace}'."
2272
+ )
2189
2273
 
2190
2274
  graphics = [
2191
2275
  "Cylinder",
@@ -2208,7 +2292,9 @@ class TOMWrapper:
2208
2292
  ]
2209
2293
 
2210
2294
  if status_graphic not in graphics:
2211
- raise ValueError(f"{icons.red_dot} The '{status_graphic}' status graphic is not valid. Please choose from these options: {graphics}.")
2295
+ raise ValueError(
2296
+ f"{icons.red_dot} The '{status_graphic}' status graphic is not valid. Please choose from these options: {graphics}."
2297
+ )
2212
2298
 
2213
2299
  measure_target = True
2214
2300
 
@@ -2216,16 +2302,18 @@ class TOMWrapper:
2216
2302
  float(target)
2217
2303
  tgt = str(target)
2218
2304
  measure_target = False
2219
- except:
2305
+ except Exception:
2220
2306
  try:
2221
2307
  tgt = next(
2222
2308
  format_dax_object_name(m.Parent.Name, m.Name)
2223
2309
  for m in self.all_measures()
2224
2310
  if m.Name == target
2225
2311
  )
2226
- except:
2227
- raise ValueError(f"{icons.red_dot} The '{target}' measure does not exist in the '{self._dataset}' semantic model within the '{self._workspace}'.")
2228
-
2312
+ except Exception:
2313
+ raise ValueError(
2314
+ f"{icons.red_dot} The '{target}' measure does not exist in the '{self._dataset}' semantic model within the '{self._workspace}'."
2315
+ )
2316
+
2229
2317
  if measure_target:
2230
2318
  expr = f"var x = [{measure_name}]/[{target}]\nreturn"
2231
2319
  else:
@@ -2246,11 +2334,11 @@ class TOMWrapper:
2246
2334
  kpi.StatusExpression = expr
2247
2335
 
2248
2336
  ms = self.model.Tables[table_name].Measures[measure_name]
2249
- try:
2337
+ if ms.KPI is not None:
2250
2338
  ms.KPI.TargetExpression = tgt
2251
2339
  ms.KPI.StatusGraphic = status_graphic
2252
2340
  ms.KPI.StatusExpression = expr
2253
- except:
2341
+ else:
2254
2342
  ms.KPI = kpi
2255
2343
 
2256
2344
  def set_aggregations(self, table_name: str, agg_table_name: str):
@@ -2269,6 +2357,8 @@ class TOMWrapper:
2269
2357
 
2270
2358
  """
2271
2359
 
2360
+ import Microsoft.AnalysisServices.Tabular as TOM
2361
+
2272
2362
  for c in self.model.Tables[agg_table_name].Columns:
2273
2363
 
2274
2364
  dataType = c.DataType
@@ -2306,7 +2396,7 @@ class TOMWrapper:
2306
2396
  The IsAvailableInMdx property value.
2307
2397
  """
2308
2398
 
2309
- self.model.Tables[table_name].Columns[column_name].IsAvailableInMdx = value
2399
+ self.model.Tables[table_name].Columns[column_name].IsAvailableInMDX = value
2310
2400
 
2311
2401
  def set_summarize_by(
2312
2402
  self, table_name: str, column_name: str, value: Optional[str] = None
@@ -2325,6 +2415,7 @@ class TOMWrapper:
2325
2415
  Defaults to none which resolves to 'Default'.
2326
2416
  `Aggregate valid values <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.aggregatefunction?view=analysisservices-dotnet>`_
2327
2417
  """
2418
+ import Microsoft.AnalysisServices.Tabular as TOM
2328
2419
  import System
2329
2420
 
2330
2421
  values = [
@@ -2348,7 +2439,9 @@ class TOMWrapper:
2348
2439
  )
2349
2440
 
2350
2441
  if value not in values:
2351
- raise ValueError(f"{icons.red_dot} '{value}' is not a valid value for the SummarizeBy property. These are the valid values: {values}.")
2442
+ raise ValueError(
2443
+ f"{icons.red_dot} '{value}' is not a valid value for the SummarizeBy property. These are the valid values: {values}."
2444
+ )
2352
2445
 
2353
2446
  self.model.Tables[table_name].Columns[column_name].SummarizeBy = (
2354
2447
  System.Enum.Parse(TOM.AggregateFunction, value)
@@ -2364,6 +2457,7 @@ class TOMWrapper:
2364
2457
  The DirectLakeBehavior property value.
2365
2458
  `DirectLakeBehavior valid values <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.directlakebehavior?view=analysisservices-dotnet>`_
2366
2459
  """
2460
+ import Microsoft.AnalysisServices.Tabular as TOM
2367
2461
  import System
2368
2462
 
2369
2463
  direct_lake_behavior = direct_lake_behavior.capitalize()
@@ -2377,7 +2471,9 @@ class TOMWrapper:
2377
2471
  dlValues = ["Automatic", "DirectLakeOnly", "DirectQueryOnly"]
2378
2472
 
2379
2473
  if direct_lake_behavior not in dlValues:
2380
- raise ValueError(f"{icons.red_dot} The 'direct_lake_behavior' parameter must be one of these values: {dlValues}.")
2474
+ raise ValueError(
2475
+ f"{icons.red_dot} The 'direct_lake_behavior' parameter must be one of these values: {dlValues}."
2476
+ )
2381
2477
 
2382
2478
  self.model.DirectLakeBehavior = System.Enum.Parse(
2383
2479
  TOM.DirectLakeBehavior, direct_lake_behavior
@@ -2475,10 +2571,14 @@ class TOMWrapper:
2475
2571
  import Microsoft.AnalysisServices.Tabular as TOM
2476
2572
 
2477
2573
  if isinstance(objects, str):
2478
- raise ValueError(f"{icons.red_dot} The 'objects' parameter must be a list of columns/measures.")
2574
+ raise ValueError(
2575
+ f"{icons.red_dot} The 'objects' parameter must be a list of columns/measures."
2576
+ )
2479
2577
 
2480
2578
  if len(objects) == 1:
2481
- raise ValueError(f"{icons.red_dot} There must be more than one object (column/measure) within the objects parameter.")
2579
+ raise ValueError(
2580
+ f"{icons.red_dot} There must be more than one object (column/measure) within the objects parameter."
2581
+ )
2482
2582
 
2483
2583
  expr = ""
2484
2584
  i = 0
@@ -2514,7 +2614,9 @@ class TOMWrapper:
2514
2614
  )
2515
2615
  success = True
2516
2616
  if not success:
2517
- raise ValueError(f"{icons.red_dot} The '{obj}' object was not found in the '{self._dataset}' semantic model.")
2617
+ raise ValueError(
2618
+ f"{icons.red_dot} The '{obj}' object was not found in the '{self._dataset}' semantic model."
2619
+ )
2518
2620
  else:
2519
2621
  i += 1
2520
2622
 
@@ -2682,7 +2784,7 @@ class TOMWrapper:
2682
2784
  try:
2683
2785
  runId = self.get_annotation_value(object=self.model, name="Vertipaq_Run")
2684
2786
  runId = str(int(runId) + 1)
2685
- except:
2787
+ except Exception:
2686
2788
  runId = "1"
2687
2789
  self.set_annotation(object=self.model, name="Vertipaq_Run", value=runId)
2688
2790
 
@@ -2711,7 +2813,7 @@ class TOMWrapper:
2711
2813
  object=object, name="Vertipaq_RecordCount"
2712
2814
  )
2713
2815
 
2714
- return int(result)
2816
+ return int(result) if result is not None else 0
2715
2817
 
2716
2818
  def records_per_segment(self, object: "TOM.Partition"):
2717
2819
  """
@@ -2736,7 +2838,7 @@ class TOMWrapper:
2736
2838
  object=object, name="Vertipaq_RecordsPerSegment"
2737
2839
  )
2738
2840
 
2739
- return float(result)
2841
+ return float(result) if result is not None else 0
2740
2842
 
2741
2843
  def used_size(self, object: Union["TOM.Hierarchy", "TOM.Relationship"]):
2742
2844
  """
@@ -2761,7 +2863,7 @@ class TOMWrapper:
2761
2863
  elif objType == TOM.ObjectType.Relationship:
2762
2864
  result = self.get_annotation_value(object=object, name="Vertipaq_UsedSize")
2763
2865
 
2764
- return int(result)
2866
+ return int(result) if result is not None else 0
2765
2867
 
2766
2868
  def data_size(self, column: "TOM.Column"):
2767
2869
  """
@@ -2784,7 +2886,7 @@ class TOMWrapper:
2784
2886
  if objType == TOM.ObjectType.Column:
2785
2887
  result = self.get_annotation_value(object=column, name="Vertipaq_DataSize")
2786
2888
 
2787
- return int(result)
2889
+ return int(result) if result is not None else 0
2788
2890
 
2789
2891
  def dictionary_size(self, column: "TOM.Column"):
2790
2892
  """
@@ -2809,7 +2911,7 @@ class TOMWrapper:
2809
2911
  object=column, name="Vertipaq_DictionarySize"
2810
2912
  )
2811
2913
 
2812
- return int(result)
2914
+ return int(result) if result is not None else 0
2813
2915
 
2814
2916
  def total_size(self, object: Union["TOM.Table", "TOM.Column"]):
2815
2917
  """
@@ -2834,7 +2936,7 @@ class TOMWrapper:
2834
2936
  elif objType == TOM.ObjectType.Table:
2835
2937
  result = self.get_annotation_value(object=object, name="Vertipaq_TotalSize")
2836
2938
 
2837
- return int(result)
2939
+ return int(result) if result is not None else 0
2838
2940
 
2839
2941
  def cardinality(self, column: "TOM.Column"):
2840
2942
  """
@@ -2859,7 +2961,7 @@ class TOMWrapper:
2859
2961
  object=column, name="Vertipaq_Cardinality"
2860
2962
  )
2861
2963
 
2862
- return int(result)
2964
+ return int(result) if result is not None else 0
2863
2965
 
2864
2966
  def depends_on(self, object, dependencies: pd.DataFrame):
2865
2967
  """
@@ -2887,7 +2989,7 @@ class TOMWrapper:
2887
2989
  objParentName = objName
2888
2990
 
2889
2991
  fil = dependencies[
2890
- (dependencies["Object Type"] == objType)
2992
+ (dependencies["Object Type"] == str(objType))
2891
2993
  & (dependencies["Table Name"] == objParentName)
2892
2994
  & (dependencies["Object Name"] == objName)
2893
2995
  ]
@@ -2944,7 +3046,7 @@ class TOMWrapper:
2944
3046
  objParentName = objName
2945
3047
 
2946
3048
  fil = dependencies[
2947
- (dependencies["Referenced Object Type"] == objType)
3049
+ (dependencies["Referenced Object Type"] == str(objType))
2948
3050
  & (dependencies["Referenced Table"] == objParentName)
2949
3051
  & (dependencies["Referenced Object"] == objName)
2950
3052
  ]
@@ -2991,7 +3093,7 @@ class TOMWrapper:
2991
3093
 
2992
3094
  for obj in self.depends_on(object=object, dependencies=dependencies):
2993
3095
  if obj.ObjectType == TOM.ObjectType.Measure:
2994
- if (obj.Parent.Name + obj.Name in object.Expression) or (
3096
+ if (f"{obj.Parent.Name}[{obj.Name}]" in object.Expression) or (
2995
3097
  format_dax_object_name(obj.Parent.Name, obj.Name)
2996
3098
  in object.Expression
2997
3099
  ):
@@ -3015,15 +3117,22 @@ class TOMWrapper:
3015
3117
  """
3016
3118
  import Microsoft.AnalysisServices.Tabular as TOM
3017
3119
 
3018
- def create_pattern(a, b):
3019
- return r"(?<!" + re.escape(a) + r"\[)(?<!" + re.escape(a) + r"'\[)" + re.escape(b)
3120
+ def create_pattern(tableList, b):
3121
+ patterns = [
3122
+ r"(?<!" + re.escape(table) + r"\[)(?<!" + re.escape(table) + r"'\[)"
3123
+ for table in tableList
3124
+ ]
3125
+ combined_pattern = "".join(patterns) + re.escape(b)
3126
+ return combined_pattern
3020
3127
 
3021
3128
  for obj in self.depends_on(object=object, dependencies=dependencies):
3022
3129
  if obj.ObjectType == TOM.ObjectType.Column:
3130
+ tableList = []
3131
+ for c in self.all_columns():
3132
+ if c.Name == obj.Name:
3133
+ tableList.append(c.Parent.Name)
3023
3134
  if (
3024
- re.search(
3025
- create_pattern(obj.Parent.Name, obj.Name), object.Expression
3026
- )
3135
+ re.search(create_pattern(tableList, obj.Name), object.Expression)
3027
3136
  is not None
3028
3137
  ):
3029
3138
  yield obj
@@ -3184,16 +3293,24 @@ class TOMWrapper:
3184
3293
  rolling_window_granularity = rolling_window_granularity.capitalize()
3185
3294
 
3186
3295
  if incremental_granularity not in incGran:
3187
- raise ValueError(f"{icons.red_dot} Invalid 'incremental_granularity' value. Please choose from the following options: {incGran}.")
3188
-
3296
+ raise ValueError(
3297
+ f"{icons.red_dot} Invalid 'incremental_granularity' value. Please choose from the following options: {incGran}."
3298
+ )
3299
+
3189
3300
  if rolling_window_granularity not in incGran:
3190
- raise ValueError(f"{icons.red_dot} Invalid 'rolling_window_granularity' value. Please choose from the following options: {incGran}.")
3301
+ raise ValueError(
3302
+ f"{icons.red_dot} Invalid 'rolling_window_granularity' value. Please choose from the following options: {incGran}."
3303
+ )
3191
3304
 
3192
3305
  if rolling_window_periods < 1:
3193
- raise ValueError(f"{icons.red_dot} Invalid 'rolling_window_periods' value. Must be a value greater than 0.")
3194
-
3306
+ raise ValueError(
3307
+ f"{icons.red_dot} Invalid 'rolling_window_periods' value. Must be a value greater than 0."
3308
+ )
3309
+
3195
3310
  if incremental_periods < 1:
3196
- raise ValueError(f"{icons.red_dot} Invalid 'incremental_periods' value. Must be a value greater than 0.")
3311
+ raise ValueError(
3312
+ f"{icons.red_dot} Invalid 'incremental_periods' value. Must be a value greater than 0."
3313
+ )
3197
3314
 
3198
3315
  t = self.model.Tables[table_name]
3199
3316
 
@@ -3201,7 +3318,9 @@ class TOMWrapper:
3201
3318
  dc = t.Columns[detect_data_changes_column]
3202
3319
 
3203
3320
  if dc.DataType != TOM.DataType.DateTime:
3204
- raise ValueError(f"{icons.red_dot} Invalid 'detect_data_changes_column' parameter. This column must be of DateTime data type.")
3321
+ raise ValueError(
3322
+ f"{icons.red_dot} Invalid 'detect_data_changes_column' parameter. This column must be of DateTime data type."
3323
+ )
3205
3324
 
3206
3325
  rp = TOM.BasicRefreshPolicy()
3207
3326
  rp.IncrementalPeriods = incremental_periods
@@ -3281,16 +3400,24 @@ class TOMWrapper:
3281
3400
  rolling_window_granularity = rolling_window_granularity.capitalize()
3282
3401
 
3283
3402
  if incremental_granularity not in incGran:
3284
- raise ValueError(f"{icons.red_dot} Invalid 'incremental_granularity' value. Please choose from the following options: {incGran}.")
3285
-
3403
+ raise ValueError(
3404
+ f"{icons.red_dot} Invalid 'incremental_granularity' value. Please choose from the following options: {incGran}."
3405
+ )
3406
+
3286
3407
  if rolling_window_granularity not in incGran:
3287
- raise ValueError(f"{icons.red_dot} Invalid 'rolling_window_granularity' value. Please choose from the following options: {incGran}.")
3408
+ raise ValueError(
3409
+ f"{icons.red_dot} Invalid 'rolling_window_granularity' value. Please choose from the following options: {incGran}."
3410
+ )
3288
3411
 
3289
3412
  if rolling_window_periods < 1:
3290
- raise ValueError(f"{icons.red_dot} Invalid 'rolling_window_periods' value. Must be a value greater than 0.")
3413
+ raise ValueError(
3414
+ f"{icons.red_dot} Invalid 'rolling_window_periods' value. Must be a value greater than 0."
3415
+ )
3291
3416
 
3292
3417
  if incremental_periods < 1:
3293
- raise ValueError(f"{icons.red_dot} Invalid 'incremental_periods' value. Must be a value greater than 0.")
3418
+ raise ValueError(
3419
+ f"{icons.red_dot} Invalid 'incremental_periods' value. Must be a value greater than 0."
3420
+ )
3294
3421
 
3295
3422
  date_format = "%m/%d/%Y"
3296
3423
 
@@ -3305,7 +3432,9 @@ class TOMWrapper:
3305
3432
  end_day = date_obj_end.day
3306
3433
 
3307
3434
  if date_obj_end <= date_obj_start:
3308
- raise ValueError(f"{icons.red_dot} Invalid 'start_date' or 'end_date'. The 'end_date' must be after the 'start_date'.")
3435
+ raise ValueError(
3436
+ f"{icons.red_dot} Invalid 'start_date' or 'end_date'. The 'end_date' must be after the 'start_date'."
3437
+ )
3309
3438
 
3310
3439
  t = self.model.Tables[table_name]
3311
3440
 
@@ -3314,14 +3443,18 @@ class TOMWrapper:
3314
3443
  dType = c.DataType
3315
3444
 
3316
3445
  if dType != TOM.DataType.DateTime:
3317
- raise ValueError(f"{icons.red_dot} The {fcName} column is of '{dType}' data type. The column chosen must be of DateTime data type.")
3446
+ raise ValueError(
3447
+ f"{icons.red_dot} The {fcName} column is of '{dType}' data type. The column chosen must be of DateTime data type."
3448
+ )
3318
3449
 
3319
3450
  if detect_data_changes_column is not None:
3320
3451
  dc = t.Columns[detect_data_changes_column]
3321
3452
  dcType = dc.DataType
3322
3453
 
3323
3454
  if dcType != TOM.DataType.DateTime:
3324
- raise ValueError(f"{icons.red_dot} Invalid 'detect_data_changes_column' parameter. This column must be of DateTime data type.")
3455
+ raise ValueError(
3456
+ f"{icons.red_dot} Invalid 'detect_data_changes_column' parameter. This column must be of DateTime data type."
3457
+ )
3325
3458
 
3326
3459
  # Start changes:
3327
3460
 
@@ -3329,7 +3462,9 @@ class TOMWrapper:
3329
3462
  i = 0
3330
3463
  for p in t.Partitions:
3331
3464
  if p.SourceType != TOM.PartitionSourceType.M:
3332
- raise ValueError(f"{icons.red_dot} Invalid partition source type. Incremental refresh can only be set up if the table's partition is an M-partition.")
3465
+ raise ValueError(
3466
+ f"{icons.red_dot} Invalid partition source type. Incremental refresh can only be set up if the table's partition is an M-partition."
3467
+ )
3333
3468
 
3334
3469
  elif i == 0:
3335
3470
  text = p.Expression
@@ -3442,9 +3577,13 @@ class TOMWrapper:
3442
3577
  ht = self.is_hybrid_table(table_name=table_name)
3443
3578
 
3444
3579
  if not ht:
3445
- raise ValueError(f"{icons.red_dot} The `data coverage definition <https://learn.microsoft.com/analysis-services/tom/table-partitions?view=asallproducts-allversions>`_ property is only applicable to `hybrid tables <https://learn.microsoft.com/power-bi/connect-data/service-dataset-modes-understand#hybrid-tables>`_. See the documentation: {doc}.")
3580
+ raise ValueError(
3581
+ f"{icons.red_dot} The `data coverage definition <https://learn.microsoft.com/analysis-services/tom/table-partitions?view=asallproducts-allversions>`_ property is only applicable to `hybrid tables <https://learn.microsoft.com/power-bi/connect-data/service-dataset-modes-understand#hybrid-tables>`_. See the documentation: {doc}."
3582
+ )
3446
3583
  if p.Mode != TOM.ModeType.DirectQuery:
3447
- raise ValueError(f"{icons.red_dot} The `data coverage definition <https://learn.microsoft.com/analysis-services/tom/table-partitions?view=asallproducts-allversions>`_ property is only applicable to the DirectQuery partition of a `hybrid table <https://learn.microsoft.com/power-bi/connect-data/service-dataset-modes-understand#hybrid-tables>`_. See the documentation: {doc}.")
3584
+ raise ValueError(
3585
+ f"{icons.red_dot} The `data coverage definition <https://learn.microsoft.com/analysis-services/tom/table-partitions?view=asallproducts-allversions>`_ property is only applicable to the DirectQuery partition of a `hybrid table <https://learn.microsoft.com/power-bi/connect-data/service-dataset-modes-understand#hybrid-tables>`_. See the documentation: {doc}."
3586
+ )
3448
3587
 
3449
3588
  dcd = TOM.DataCoverageDefinition()
3450
3589
  dcd.Expression = expression
@@ -3471,7 +3610,9 @@ class TOMWrapper:
3471
3610
  value = value.capitalize()
3472
3611
 
3473
3612
  if value not in values:
3474
- raise ValueError(f"{icons.red_dot} Invalid encoding hint value. Please choose from these options: {values}.")
3613
+ raise ValueError(
3614
+ f"{icons.red_dot} Invalid encoding hint value. Please choose from these options: {values}."
3615
+ )
3475
3616
 
3476
3617
  self.model.Tables[table_name].Columns[column_name].EncodingHint = (
3477
3618
  System.Enum.Parse(TOM.EncodingHintType, value)
@@ -3513,7 +3654,9 @@ class TOMWrapper:
3513
3654
  value = "Boolean"
3514
3655
 
3515
3656
  if value not in values:
3516
- raise ValueError(f"{icons.red_dot} Invalid data type. Please choose from these options: {values}.")
3657
+ raise ValueError(
3658
+ f"{icons.red_dot} Invalid data type. Please choose from these options: {values}."
3659
+ )
3517
3660
 
3518
3661
  self.model.Tables[table_name].Columns[column_name].DataType = System.Enum.Parse(
3519
3662
  TOM.DataType, value
@@ -3545,45 +3688,66 @@ class TOMWrapper:
3545
3688
  for t in time_intel:
3546
3689
  t = t.capitalize()
3547
3690
  if t not in [time_intel_options]:
3548
- raise ValueError(f"{icons.red_dot} The '{t}' time intelligence variation is not supported. Valid options: {time_intel_options}.")
3691
+ raise ValueError(
3692
+ f"{icons.red_dot} The '{t}' time intelligence variation is not supported. Valid options: {time_intel_options}."
3693
+ )
3549
3694
 
3550
3695
  # Validate measure and extract table name
3551
- for m in self.all_measures():
3552
- if m.Name == measure_name:
3553
- table_name = m.Parent.Name
3696
+ matching_measures = [
3697
+ m.Parent.Name for m in self.all_measures() if m.Name == measure_name
3698
+ ]
3554
3699
 
3555
3700
  if table_name is None:
3556
- raise ValueError(f"{icons.red_dot} The '{measure_name}' is not a valid measure in the '{self._dataset}' semantic model within the '{self._workspace}' workspace.")
3701
+ raise ValueError(
3702
+ f"{icons.red_dot} The '{measure_name}' is not a valid measure in the '{self._dataset}' semantic model within the '{self._workspace}' workspace."
3703
+ )
3557
3704
 
3705
+ table_name = matching_measures[0]
3558
3706
  # Validate date table
3559
3707
  if not self.is_date_table(date_table):
3560
- raise ValueError(f"{icons.red_dot} The '{date_table}' table is not a valid date table in the '{self._dataset}' wemantic model within the '{self._workspace}' workspace.")
3708
+ raise ValueError(
3709
+ f"{icons.red_dot} The '{date_table}' table is not a valid date table in the '{self._dataset}' wemantic model within the '{self._workspace}' workspace."
3710
+ )
3561
3711
 
3562
3712
  # Extract date key from date table
3563
- for c in self.all_columns():
3564
- if c.Parent.Name == date_table and c.IsKey:
3565
- date_key = c.Name
3713
+ matching_columns = [
3714
+ c.Name
3715
+ for c in self.all_columns()
3716
+ if c.Parent.Name == date_table and c.IsKey
3717
+ ]
3718
+
3719
+ if not matching_columns:
3720
+ raise ValueError(
3721
+ f"{icons.red_dot} The '{date_table}' table does not have a date key column in the '{self._dataset}' semantic model within the '{self._workspace}' workspace."
3722
+ )
3723
+
3724
+ date_key = matching_columns[0]
3566
3725
 
3567
3726
  # Create the new time intelligence measures
3568
3727
  for t in time_intel:
3569
- if t == "MTD":
3570
- expr = f"CALCULATE([{measure_name}],DATES{time_intel}('{date_table}'[{date_key}]))"
3571
- new_meas_name = f"{measure_name} {t}"
3572
- self.add_measure(
3573
- table_name=table_name,
3574
- measure_name=new_meas_name,
3575
- expression=expr,
3576
- )
3577
-
3578
- def update_m_partition(self, table_name: str, partition_name: str, expression: Optional[str | None] = None, mode: Optional[str | None] = None, description: Optional[str | None] = None):
3728
+ expr = f"CALCULATE([{measure_name}],DATES{t}('{date_table}'[{date_key}]))"
3729
+ new_meas_name = f"{measure_name} {t}"
3730
+ self.add_measure(
3731
+ table_name=table_name,
3732
+ measure_name=new_meas_name,
3733
+ expression=expr,
3734
+ )
3579
3735
 
3736
+ def update_m_partition(
3737
+ self,
3738
+ table_name: str,
3739
+ partition_name: str,
3740
+ expression: Optional[str | None] = None,
3741
+ mode: Optional[str | None] = None,
3742
+ description: Optional[str | None] = None,
3743
+ ):
3580
3744
  """
3581
3745
  Updates an M partition for a table within a semantic model.
3582
3746
 
3583
3747
  Parameters
3584
3748
  ----------
3585
3749
  table_name : str
3586
- Name of the table.
3750
+ Name of the table.
3587
3751
  partition_name : str
3588
3752
  Name of the partition.
3589
3753
  expression : str, default=None
@@ -3602,7 +3766,9 @@ class TOMWrapper:
3602
3766
 
3603
3767
  p = self.model.Tables[table_name].Partitions[partition_name]
3604
3768
  if p.SourceType != TOM.PartitionSourceType.M:
3605
- raise ValueError(f"Invalid partition source type. This function is only for M partitions.")
3769
+ raise ValueError(
3770
+ f"{icons.red_dot} Invalid partition source type. This function is only for M partitions."
3771
+ )
3606
3772
  if expression is not None:
3607
3773
  p.Source.Expression = expression
3608
3774
  if mode is not None:
@@ -3610,15 +3776,225 @@ class TOMWrapper:
3610
3776
  if description is not None:
3611
3777
  p.Description = description
3612
3778
 
3613
- def set_sort_by_column(self, table_name: str, column_name: str, sort_by_column: str):
3779
+ def update_measure(
3780
+ self,
3781
+ measure_name: str,
3782
+ expression: Optional[str | None] = None,
3783
+ format_string: Optional[str | None] = None,
3784
+ hidden: Optional[bool | None] = None,
3785
+ description: Optional[str | None] = None,
3786
+ display_folder: Optional[str | None] = None,
3787
+ ):
3788
+ """
3789
+ Updates a measure within a semantic model.
3614
3790
 
3791
+ Parameters
3792
+ ----------
3793
+ measure_name : str
3794
+ Name of the measure.
3795
+ expression : str, default=None
3796
+ DAX expression of the measure.
3797
+ Defaults to None which keeps the existing setting.
3798
+ format_string : str, default=None
3799
+ Format string of the measure.
3800
+ Defaults to None which keeps the existing setting.
3801
+ hidden : bool, default=None
3802
+ Whether the measure will be hidden or visible.
3803
+ Defaults to None which keeps the existing setting.
3804
+ description : str, default=None
3805
+ A description of the measure.
3806
+ Defaults to None which keeps the existing setting.
3807
+ display_folder : str, default=None
3808
+ The display folder in which the measure will reside.
3809
+ Defaults to None which keeps the existing setting.
3810
+ """
3811
+
3812
+ table_name = next(
3813
+ m.Parent.Name for m in self.all_measures() if m.Name == measure_name
3814
+ )
3815
+ m = self.model.Tables[table_name].Measures[measure_name]
3816
+ if expression is not None:
3817
+ m.Expression = expression
3818
+ if format_string is not None:
3819
+ m.FormatString = format_string
3820
+ if hidden is not None:
3821
+ m.IsHidden = hidden
3822
+ if description is not None:
3823
+ m.Description = description
3824
+ if display_folder is not None:
3825
+ m.DisplayFolder = display_folder
3826
+
3827
+ def update_column(
3828
+ self,
3829
+ table_name: str,
3830
+ column_name: str,
3831
+ source_column: Optional[str | None] = None,
3832
+ data_type: Optional[str | None] = None,
3833
+ expression: Optional[str | None] = None,
3834
+ format_string: Optional[str | None] = None,
3835
+ hidden: Optional[bool | None] = None,
3836
+ description: Optional[str | None] = None,
3837
+ display_folder: Optional[str | None] = None,
3838
+ data_category: Optional[str | None] = None,
3839
+ key: Optional[bool | None] = None,
3840
+ summarize_by: Optional[str | None] = None,
3841
+ ):
3842
+ """
3843
+ Updates a column within a semantic model.
3844
+
3845
+ Parameters
3846
+ ----------
3847
+ table_name : str
3848
+ Name of the table in which the column exists.
3849
+ column_name : str
3850
+ Name of the column.
3851
+ source_column : str, default=None
3852
+ The source column for the column (for data columns only).
3853
+ Defaults to None which keeps the existing setting.
3854
+ data_type : str, default=None
3855
+ The data type of the column.
3856
+ Defaults to None which keeps the existing setting.
3857
+ expression : str, default=None
3858
+ The DAX expression of the column (for calculated columns only).
3859
+ Defaults to None which keeps the existing setting.
3860
+ format_string : str, default=None
3861
+ Format string of the column.
3862
+ Defaults to None which keeps the existing setting.
3863
+ hidden : bool, default=None
3864
+ Whether the column will be hidden or visible.
3865
+ Defaults to None which keeps the existing setting.
3866
+ description : str, default=None
3867
+ A description of the column.
3868
+ Defaults to None which keeps the existing setting.
3869
+ display_folder : str, default=None
3870
+ The display folder in which the column will reside.
3871
+ Defaults to None which keeps the existing setting.
3872
+ data_category : str, default=None
3873
+ The data category of the column.
3874
+ Defaults to None which keeps the existing setting.
3875
+ key : bool, default=False
3876
+ Marks the column as the primary key of the table.
3877
+ Defaults to None which keeps the existing setting.
3878
+ summarize_by : str, default=None
3879
+ Sets the value for the Summarize By property of the column.
3880
+ Defaults to None which keeps the existing setting.
3881
+ """
3882
+
3883
+ import Microsoft.AnalysisServices.Tabular as TOM
3884
+ import System
3885
+
3886
+ c = self.model.Tables[table_name].Measures[column_name]
3887
+ if c.Type == TOM.ColumnType.Data:
3888
+ if source_column is not None:
3889
+ c.SourceColumn = source_column
3890
+ if c.Type == TOM.ColumnType.Calculated:
3891
+ if expression is not None:
3892
+ c.Expression = expression
3893
+ if data_type is not None:
3894
+ c.DataType = System.Enum.Parse(TOM.DataType, data_type)
3895
+ if format_string is not None:
3896
+ c.FormatString = format_string
3897
+ if hidden is not None:
3898
+ c.IsHidden = hidden
3899
+ if description is not None:
3900
+ c.Description = description
3901
+ if display_folder is not None:
3902
+ c.DisplayFolder = display_folder
3903
+ if key is not None:
3904
+ c.IsKey = key
3905
+ if data_category is not None:
3906
+ c.DataCategory = data_category
3907
+ if summarize_by is not None:
3908
+ c.SummarizeBy = System.Enum.Parse(TOM.AggregateFunction, summarize_by)
3909
+
3910
+ def update_role(
3911
+ self,
3912
+ role_name: str,
3913
+ model_permission: Optional[str | None] = None,
3914
+ description: Optional[str | None] = None,
3915
+ ):
3916
+ """
3917
+ Updates a role within a semantic model.
3918
+
3919
+ Parameters
3920
+ ----------
3921
+ role_name : str
3922
+ Name of the role.
3923
+ model_permission : str, default=None
3924
+ The model permission for the role.
3925
+ Defaults to None which keeps the existing setting.
3926
+ description : str, default=None
3927
+ The description of the role.
3928
+ Defaults to None which keeps the existing setting.
3929
+ """
3930
+
3931
+ import Microsoft.AnalysisServices.Tabular as TOM
3932
+ import System
3933
+
3934
+ obj = self.model.Roles[role_name]
3935
+
3936
+ if model_permission is not None:
3937
+ obj.ModelPermission = System.Enum.Parse(
3938
+ TOM.ModelPermission, model_permission
3939
+ )
3940
+ if description is not None:
3941
+ obj.Description = description
3942
+
3943
+ def update_calculation_item(
3944
+ self,
3945
+ table_name: str,
3946
+ calculation_item_name: str,
3947
+ expression: Optional[str | None] = None,
3948
+ ordinal: Optional[int | None] = None,
3949
+ format_string_expression: Optional[str | None] = None,
3950
+ description: Optional[str | None] = None,
3951
+ ):
3952
+ """
3953
+ Updates a calculation item within a semantic model.
3954
+
3955
+ Parameters
3956
+ ----------
3957
+ table_name : str
3958
+ Name of the calculation group (table).
3959
+ calculation_item_name : str
3960
+ Name of the calculation item.
3961
+ expression : str, default=None
3962
+ The DAX expression of the calculation item.
3963
+ Defaults to None which keeps the existing setting.
3964
+ ordinal : int, default=None
3965
+ The ordinal of the calculation item.
3966
+ Defaults to None which keeps the existing setting.
3967
+ format_string_expression : str, default=None
3968
+ The format string expression for the calculation item.
3969
+ Defaults to None which keeps the existing setting.
3970
+ description : str, default=None
3971
+ The description of the role.
3972
+ Defaults to None which keeps the existing setting.
3973
+ """
3974
+
3975
+ obj = self.Tables[table_name].CalculationGroup.CalculationItems[
3976
+ calculation_item_name
3977
+ ]
3978
+
3979
+ if expression is not None:
3980
+ obj.Expression = expression
3981
+ if format_string_expression is not None:
3982
+ obj.FormatStringDefinition.Expression = format_string_expression
3983
+ if ordinal is not None:
3984
+ obj.Ordinal = ordinal
3985
+ if description is not None:
3986
+ obj.Description = description
3987
+
3988
+ def set_sort_by_column(
3989
+ self, table_name: str, column_name: str, sort_by_column: str
3990
+ ):
3615
3991
  """
3616
3992
  Sets the sort by column for a column in a semantic model.
3617
3993
 
3618
3994
  Parameters
3619
3995
  ----------
3620
3996
  table_name : str
3621
- Name of the table.
3997
+ Name of the table.
3622
3998
  column_name : str
3623
3999
  Name of the column.
3624
4000
  sort_by_column : str
@@ -3630,25 +4006,52 @@ class TOMWrapper:
3630
4006
  sbc = self.model.Tables[table_name].Columns[sort_by_column]
3631
4007
 
3632
4008
  if sbc.DataType != TOM.DataType.Int64:
3633
- raise ValueError(f"Invalid sort by column data type. The sort by column must be of 'Int64' data type.")
3634
-
4009
+ raise ValueError(
4010
+ f"{icons.red_dot} Invalid sort by column data type. The sort by column must be of 'Int64' data type."
4011
+ )
4012
+
3635
4013
  self.model.Tables[table_name].Columns[column_name].SortByColumn = sbc
3636
4014
 
3637
4015
  def remove_sort_by_column(self, table_name: str, column_name: str):
3638
-
3639
4016
  """
3640
4017
  Removes the sort by column for a column in a semantic model.
3641
4018
 
3642
4019
  Parameters
3643
4020
  ----------
3644
4021
  table_name : str
3645
- Name of the table.
4022
+ Name of the table.
3646
4023
  column_name : str
3647
4024
  Name of the column.
3648
4025
  """
3649
4026
 
3650
4027
  self.model.Tables[table_name].Columns[column_name].SortByColumn = None
3651
4028
 
4029
+ def is_calculated_table(self, table_name: str):
4030
+ """
4031
+ Identifies if a table is a calculated table.
4032
+
4033
+ Parameters
4034
+ ----------
4035
+ table_name : str
4036
+ Name of the table.
4037
+
4038
+ Returns
4039
+ -------
4040
+ bool
4041
+ A boolean value indicating whether the table is a calculated table.
4042
+ """
4043
+
4044
+ import Microsoft.AnalysisServices.Tabular as TOM
4045
+
4046
+ isCalcTable = False
4047
+ t = self.model.Tables[table_name]
4048
+ if t.ObjectType == TOM.ObjectType.Table:
4049
+ if any(
4050
+ p.SourceType == TOM.PartitionSourceType.Calculated for p in t.Partitions
4051
+ ):
4052
+ isCalcTable = True
4053
+ return isCalcTable
4054
+
3652
4055
  def close(self):
3653
4056
  if not self._readonly and self.model is not None:
3654
4057
  self.model.SaveChanges()
@@ -3700,4 +4103,4 @@ def connect_semantic_model(
3700
4103
  try:
3701
4104
  yield tw
3702
4105
  finally:
3703
- tw.close()
4106
+ tw.close()
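
The 0.6.0 diff above adds several update helpers to TOMWrapper (update_measure, update_column, update_role, update_calculation_item). Below is a minimal sketch of how they might be called through connect_semantic_model; the 'Sales' dataset and 'Finance' workspace are placeholder names, and the dataset/readonly/workspace keyword arguments are assumed from the wrapper's attributes and docstring. Properties left as None keep their existing values.

    from sempy_labs.tom import connect_semantic_model

    # Sketch only: requires a Microsoft Fabric environment; 'Sales' and 'Finance' are placeholders.
    with connect_semantic_model(dataset="Sales", workspace="Finance", readonly=False) as tom:
        # Arguments left as None keep the measure's existing settings.
        tom.update_measure(
            measure_name="Total Sales",
            expression="SUM('Sales'[Amount])",
            format_string="#,0",
            display_folder="Revenue",
        )
        # model_permission is parsed into TOM.ModelPermission ("Read", "ReadRefresh", ...).
        tom.update_role(role_name="Reader", model_permission="Read")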
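
set_translation and remove_translation now also accept TOM.Level objects, and the target language is validated against the model's cultures before translating. A sketch, assuming a 'fr-FR' culture already exists in the model:

    from sempy_labs.tom import connect_semantic_model

    with connect_semantic_model(dataset="Sales", workspace="Finance", readonly=False) as tom:
        for lev in tom.all_levels():
            # Translates the caption ("Name") of every hierarchy level; raises ValueError
            # if the 'fr-FR' culture has not been added to the semantic model.
            tom.set_translation(object=lev, language="fr-FR", property="Name", value=lev.Name)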
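
hybrid_tables and date_tables were renamed to all_hybrid_tables and all_date_tables, and a new is_calculated_table helper was added. A read-only sketch of enumerating them, using the same placeholder dataset and workspace as above:

    from sempy_labs.tom import connect_semantic_model

    with connect_semantic_model(dataset="Sales", workspace="Finance", readonly=True) as tom:
        hybrid = [t.Name for t in tom.all_hybrid_tables()]    # was hybrid_tables()
        dates = [t.Name for t in tom.all_date_tables()]       # was date_tables()
        calc = [t.Name for t in tom.model.Tables if tom.is_calculated_table(t.Name)]
        print(hybrid, dates, calc)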
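
get_annotation_value and get_extended_property_value now return None when the requested name is not present instead of raising, and the Vertipaq-derived getters (data_size, total_size, cardinality, and so on) fall back to 0 in that case. A small sketch:

    from sempy_labs.tom import connect_semantic_model

    with connect_semantic_model(dataset="Sales", workspace="Finance") as tom:
        # Returns None rather than raising when the annotation does not exist.
        run_id = tom.get_annotation_value(object=tom.model, name="Vertipaq_Run")
        # total_size now returns 0 when the underlying Vertipaq annotation is missing.
        model_size = sum(tom.total_size(object=t) for t in tom.model.Tables)
        print(run_id, model_size)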