semantic-link-labs 0.8.2__py3-none-any.whl → 0.8.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (108)
  1. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/METADATA +37 -8
  2. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/RECORD +108 -104
  3. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +38 -0
  5. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
  6. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
  7. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
  8. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
  9. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
  10. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
  11. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
  12. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
  13. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
  14. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
  15. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
  16. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
  17. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
  18. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
  19. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
  20. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
  21. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
  22. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
  23. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
  24. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
  25. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
  26. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
  27. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
  28. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
  29. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
  30. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
  31. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
  32. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
  33. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
  34. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
  35. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
  36. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
  37. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
  38. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
  39. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
  40. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
  41. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
  42. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
  43. sempy_labs/_capacities.py +138 -25
  44. sempy_labs/_capacity_migration.py +161 -60
  45. sempy_labs/_clear_cache.py +3 -3
  46. sempy_labs/_data_pipelines.py +54 -0
  47. sempy_labs/_dataflows.py +4 -0
  48. sempy_labs/_deployment_pipelines.py +13 -7
  49. sempy_labs/_environments.py +6 -0
  50. sempy_labs/_eventhouses.py +6 -0
  51. sempy_labs/_eventstreams.py +6 -0
  52. sempy_labs/_external_data_shares.py +190 -0
  53. sempy_labs/_generate_semantic_model.py +26 -4
  54. sempy_labs/_git.py +15 -15
  55. sempy_labs/_helper_functions.py +186 -11
  56. sempy_labs/_icons.py +55 -22
  57. sempy_labs/_kql_databases.py +6 -0
  58. sempy_labs/_kql_querysets.py +6 -0
  59. sempy_labs/_list_functions.py +6 -3
  60. sempy_labs/_managed_private_endpoints.py +166 -0
  61. sempy_labs/_mirrored_warehouses.py +2 -0
  62. sempy_labs/_ml_experiments.py +6 -0
  63. sempy_labs/_ml_models.py +6 -0
  64. sempy_labs/_model_bpa.py +11 -6
  65. sempy_labs/_model_bpa_bulk.py +14 -30
  66. sempy_labs/_model_bpa_rules.py +8 -3
  67. sempy_labs/_notebooks.py +111 -15
  68. sempy_labs/_query_scale_out.py +8 -6
  69. sempy_labs/_refresh_semantic_model.py +299 -49
  70. sempy_labs/_spark.py +12 -5
  71. sempy_labs/_sql.py +2 -2
  72. sempy_labs/_translations.py +16 -14
  73. sempy_labs/_vertipaq.py +127 -116
  74. sempy_labs/_warehouses.py +90 -1
  75. sempy_labs/_workloads.py +128 -0
  76. sempy_labs/_workspace_identity.py +4 -4
  77. sempy_labs/_workspaces.py +14 -1
  78. sempy_labs/admin/__init__.py +2 -0
  79. sempy_labs/admin/_basic_functions.py +203 -58
  80. sempy_labs/admin/_domains.py +18 -18
  81. sempy_labs/directlake/__init__.py +2 -0
  82. sempy_labs/directlake/_directlake_schema_sync.py +2 -6
  83. sempy_labs/directlake/_dl_helper.py +4 -1
  84. sempy_labs/directlake/_generate_shared_expression.py +1 -1
  85. sempy_labs/directlake/_get_shared_expression.py +7 -1
  86. sempy_labs/directlake/_guardrails.py +3 -2
  87. sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -8
  88. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
  89. sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
  90. sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
  91. sempy_labs/lakehouse/_shortcuts.py +4 -0
  92. sempy_labs/migration/_create_pqt_file.py +2 -2
  93. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
  94. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
  95. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
  96. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
  97. sempy_labs/migration/_migration_validation.py +2 -0
  98. sempy_labs/migration/_refresh_calc_tables.py +1 -0
  99. sempy_labs/report/__init__.py +4 -1
  100. sempy_labs/report/_generate_report.py +16 -14
  101. sempy_labs/report/_paginated.py +74 -0
  102. sempy_labs/report/_report_bpa.py +8 -10
  103. sempy_labs/report/_report_functions.py +19 -19
  104. sempy_labs/report/_report_rebind.py +6 -1
  105. sempy_labs/report/_reportwrapper.py +3 -3
  106. sempy_labs/tom/_model.py +173 -67
  107. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/LICENSE +0 -0
  108. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/top_level.txt +0 -0
sempy_labs/tom/_model.py CHANGED
@@ -6,6 +6,7 @@ from datetime import datetime
6
6
  from sempy_labs._helper_functions import (
7
7
  format_dax_object_name,
8
8
  generate_guid,
9
+ _make_list_unique,
9
10
  )
10
11
  from sempy_labs._list_functions import list_relationships
11
12
  from sempy_labs._refresh_semantic_model import refresh_semantic_model
@@ -15,6 +16,7 @@ from typing import List, Iterator, Optional, Union, TYPE_CHECKING
15
16
  from sempy._utils._log import log
16
17
  import sempy_labs._icons as icons
17
18
  from sempy.fabric.exceptions import FabricHTTPException
19
+ import ast
18
20
 
19
21
  if TYPE_CHECKING:
20
22
  import Microsoft.AnalysisServices.Tabular
@@ -33,6 +35,8 @@ class TOMWrapper:
33
35
  _workspace: str
34
36
  _readonly: bool
35
37
  _tables_added: List[str]
38
+ _table_map = dict
39
+ _column_map = dict
36
40
 
37
41
  def __init__(self, dataset, workspace, readonly):
38
42
  self._dataset = dataset
@@ -45,6 +49,18 @@ class TOMWrapper:
45
49
  )
46
50
  self.model = self._tom_server.Databases.GetByName(dataset).Model
47
51
 
52
+ self._table_map = {}
53
+ self._column_map = {}
54
+ for t in self.model.Tables:
55
+ if len(t.LineageTag) == 0:
56
+ t.LineageTag = generate_guid()
57
+ self._table_map[t.LineageTag] = t.Name
58
+
59
+ for c in self.all_columns():
60
+ if len(c.LineageTag) == 0:
61
+ c.LineageTag = generate_guid()
62
+ self._column_map[c.LineageTag] = [c.Name, c.DataType]
63
+
48
64
  def all_columns(self):
49
65
  """
50
66
  Outputs a list of all columns within all tables in the semantic model.
@@ -291,8 +307,6 @@ class TOMWrapper:
291
307
  obj.LineageTag = generate_guid()
292
308
  if source_lineage_tag is not None:
293
309
  obj.SourceLineageTag = source_lineage_tag
294
- else:
295
- obj.SourceLineageTag = generate_guid()
296
310
  if detail_rows_expression is not None:
297
311
  drd = TOM.DetailRowsDefinition()
298
312
  drd.Expression = detail_rows_expression
@@ -388,8 +402,6 @@ class TOMWrapper:
388
402
  obj.LineageTag = generate_guid()
389
403
  if source_lineage_tag is not None:
390
404
  obj.SourceLineageTag = source_lineage_tag
391
- else:
392
- obj.SourceLineageTag = generate_guid()
393
405
  self.model.Tables[table_name].Columns.Add(obj)
394
406
 
395
407
  def add_data_column(
@@ -478,8 +490,6 @@ class TOMWrapper:
478
490
  obj.LineageTag = generate_guid()
479
491
  if source_lineage_tag is not None:
480
492
  obj.SourceLineageTag = source_lineage_tag
481
- else:
482
- obj.SourceLineagetTag = generate_guid()
483
493
  self.model.Tables[table_name].Columns.Add(obj)
484
494
 
485
495
  def add_calculated_column(
@@ -568,8 +578,6 @@ class TOMWrapper:
568
578
  obj.LineageTag = generate_guid()
569
579
  if source_lineage_tag is not None:
570
580
  obj.SourceLineageTag = source_lineage_tag
571
- else:
572
- obj.SourceLineagetTag = generate_guid()
573
581
  self.model.Tables[table_name].Columns.Add(obj)
574
582
 
575
583
  def add_calculation_item(
@@ -785,8 +793,6 @@ class TOMWrapper:
785
793
  obj.LineageTag = generate_guid()
786
794
  if source_lineage_tag is not None:
787
795
  obj.SourceLineageTag = source_lineage_tag
788
- else:
789
- obj.SourceLineagetTag = generate_guid()
790
796
  self.model.Tables[table_name].Hierarchies.Add(obj)
791
797
 
792
798
  for col in columns:
@@ -795,7 +801,6 @@ class TOMWrapper:
795
801
  lvl.Name = levels[columns.index(col)]
796
802
  lvl.Ordinal = columns.index(col)
797
803
  lvl.LineageTag = generate_guid()
798
- lvl.SourceLineageTag = generate_guid()
799
804
  self.model.Tables[table_name].Hierarchies[hierarchy_name].Levels.Add(lvl)
800
805
 
801
806
  def add_relationship(
@@ -911,6 +916,7 @@ class TOMWrapper:
911
916
  part = TOM.Partition()
912
917
  part.Name = name
913
918
  part.Source = TOM.CalculationGroupSource()
919
+ part.Mode = TOM.ModeType.Import
914
920
  tbl.Partitions.Add(part)
915
921
 
916
922
  sortCol = "Ordinal"
@@ -969,8 +975,6 @@ class TOMWrapper:
969
975
  exp.LineageTag = generate_guid()
970
976
  if source_lineage_tag is not None:
971
977
  exp.SourceLineageTag = source_lineage_tag
972
- else:
973
- exp.SourceLineageTag = generate_guid()
974
978
  exp.Kind = TOM.ExpressionKind.M
975
979
  exp.Expression = expression
976
980
 
@@ -987,10 +991,13 @@ class TOMWrapper:
987
991
  """
988
992
  import Microsoft.AnalysisServices.Tabular as TOM
989
993
 
990
- cul = TOM.Culture()
991
- cul.Name = language
992
-
993
994
  if not any(c.Name == language for c in self.model.Cultures):
995
+ cul = TOM.Culture()
996
+ cul.Name = language
997
+ lm = TOM.LinguisticMetadata()
998
+ lm.ContentType = TOM.ContentType.Json
999
+ lm.Content = f'{{"Version": "1.0.0", "Language": "{language}"}}'
1000
+ cul.LinguisticMetadata = lm
994
1001
  self.model.Cultures.Add(cul)
995
1002
 
996
1003
  def add_perspective(self, perspective_name: str):
@@ -2108,7 +2115,9 @@ class TOMWrapper:
2108
2115
  if c.Parent.Name == table_name and c.Parent.DataCategory == "Time"
2109
2116
  )
2110
2117
 
2111
- def mark_as_date_table(self, table_name: str, column_name: str):
2118
+ def mark_as_date_table(
2119
+ self, table_name: str, column_name: str, validate: bool = False
2120
+ ):
2112
2121
  """
2113
2122
  Marks a table as a `date table <https://learn.microsoft.com/power-bi/transform-model/desktop-date-tables>`_.
2114
2123
 
@@ -2118,6 +2127,8 @@ class TOMWrapper:
2118
2127
  Name of the table.
2119
2128
  column_name : str
2120
2129
  Name of the date column in the table.
2130
+ validate : bool, default=False
2131
+ If True, performs a validation on whether the date table is viable.
2121
2132
  """
2122
2133
  import Microsoft.AnalysisServices.Tabular as TOM
2123
2134
 
@@ -2128,31 +2139,32 @@ class TOMWrapper:
2128
2139
  f"{icons.red_dot} The column specified in the 'column_name' parameter in this function must be of DateTime data type."
2129
2140
  )
2130
2141
 
2131
- daxQuery = f"""
2132
- define measure '{table_name}'[test] =
2133
- var mn = MIN('{table_name}'[{column_name}])
2134
- var ma = MAX('{table_name}'[{column_name}])
2135
- var x = COUNTROWS(DISTINCT('{table_name}'[{column_name}]))
2136
- var y = DATEDIFF(mn, ma, DAY) + 1
2137
- return if(y = x, 1,0)
2138
-
2139
- EVALUATE
2140
- SUMMARIZECOLUMNS(
2141
- "1",[test]
2142
- )
2143
- """
2144
- df = fabric.evaluate_dax(
2145
- dataset=self._dataset, workspace=self._workspace, dax_string=daxQuery
2146
- )
2147
- value = df["1"].iloc[0]
2148
- if value != "1":
2149
- raise ValueError(
2150
- f"{icons.red_dot} The '{column_name}' within the '{table_name}' table does not contain contiguous date values."
2142
+ if validate:
2143
+ dax_query = f"""
2144
+ define measure '{table_name}'[test] =
2145
+ var mn = MIN('{table_name}'[{column_name}])
2146
+ var ma = MAX('{table_name}'[{column_name}])
2147
+ var x = COUNTROWS(DISTINCT('{table_name}'[{column_name}]))
2148
+ var y = DATEDIFF(mn, ma, DAY) + 1
2149
+ return if(y = x, 1,0)
2150
+
2151
+ EVALUATE
2152
+ SUMMARIZECOLUMNS(
2153
+ "1",[test]
2151
2154
  )
2155
+ """
2156
+ df = fabric.evaluate_dax(
2157
+ dataset=self._dataset, workspace=self._workspace, dax_string=dax_query
2158
+ )
2159
+ value = df["[1]"].iloc[0]
2160
+ if value != "1":
2161
+ raise ValueError(
2162
+ f"{icons.red_dot} The '{column_name}' within the '{table_name}' table does not contain contiguous date values."
2163
+ )
2152
2164
 
2153
2165
  # Mark as a date table
2154
2166
  t.DataCategory = "Time"
2155
- c.Columns[column_name].IsKey = True
2167
+ c.IsKey = True
2156
2168
  print(
2157
2169
  f"{icons.green_dot} The '{table_name}' table has been marked as a date table using the '{column_name}' column as its primary date key."
2158
2170
  )
@@ -2654,8 +2666,6 @@ class TOMWrapper:
2654
2666
  t.LineageTag = generate_guid()
2655
2667
  if source_lineage_tag is not None:
2656
2668
  t.SourceLineageTag = source_lineage_tag
2657
- else:
2658
- t.SourceLineagetTag = generate_guid()
2659
2669
  t.Hidden = hidden
2660
2670
  self.model.Tables.Add(t)
2661
2671
 
@@ -2693,6 +2703,7 @@ class TOMWrapper:
2693
2703
 
2694
2704
  par = TOM.Partition()
2695
2705
  par.Name = name
2706
+ par.Mode = TOM.ModeType.Import
2696
2707
 
2697
2708
  parSource = TOM.CalculatedPartitionSource()
2698
2709
  parSource.Expression = expression
@@ -2710,8 +2721,6 @@ class TOMWrapper:
2710
2721
  t.LineageTag = generate_guid()
2711
2722
  if source_lineage_tag is not None:
2712
2723
  t.SourceLineageTag = source_lineage_tag
2713
- else:
2714
- t.SourceLineagetTag = generate_guid()
2715
2724
  t.Hidden = hidden
2716
2725
  t.Partitions.Add(par)
2717
2726
  self.model.Tables.Add(t)
@@ -2952,6 +2961,8 @@ class TOMWrapper:
2952
2961
  runId = "1"
2953
2962
  self.set_annotation(object=self.model, name="Vertipaq_Run", value=runId)
2954
2963
 
2964
+ icons.sll_tags.append("VertipaqAnnotations")
2965
+
2955
2966
  def row_count(self, object: Union["TOM.Partition", "TOM.Table"]):
2956
2967
  """
2957
2968
  Obtains the row count of a table or partition within a semantic model.
@@ -4185,15 +4196,7 @@ class TOMWrapper:
4185
4196
  Name of the column to use for sorting. Must be of integer (Int64) data type.
4186
4197
  """
4187
4198
 
4188
- import Microsoft.AnalysisServices.Tabular as TOM
4189
-
4190
4199
  sbc = self.model.Tables[table_name].Columns[sort_by_column]
4191
-
4192
- if sbc.DataType != TOM.DataType.Int64:
4193
- raise ValueError(
4194
- f"{icons.red_dot} Invalid sort by column data type. The sort by column must be of 'Int64' data type."
4195
- )
4196
-
4197
4200
  self.model.Tables[table_name].Columns[column_name].SortByColumn = sbc
4198
4201
 
4199
4202
  def remove_sort_by_column(self, table_name: str, column_name: str):
@@ -4281,41 +4284,67 @@ class TOMWrapper:
4281
4284
  icons.default_schema
4282
4285
  )
4283
4286
  t.SourceLineageTag = f"[{schema_name}].[{entity_name}]"
4284
- else:
4285
- t.SourceLineageTag = generate_guid()
4286
4287
  for c in self.all_columns():
4287
4288
  if len(c.LineageTag) == 0:
4288
4289
  c.LineageTag = generate_guid()
4289
- if len(c.SourceLineageTag) == 0:
4290
- c.SourceLineageTag = generate_guid()
4291
4290
  for m in self.all_measures():
4292
4291
  if len(m.LineageTag) == 0:
4293
4292
  m.LineageTag = generate_guid()
4294
- if len(m.SourceLineageTag) == 0:
4295
- m.SourceLineageTag = generate_guid()
4296
4293
  for h in self.all_hierarchies():
4297
4294
  if len(h.LineageTag) == 0:
4298
4295
  h.LineageTag = generate_guid()
4299
- if len(h.SourceLineageTag) == 0:
4300
- h.SourceLineageTag = generate_guid()
4301
4296
  for lvl in self.all_levels():
4302
4297
  if len(lvl.LineageTag) == 0:
4303
4298
  lvl.LineageTag = generate_guid()
4304
- if len(lvl.SourceLineageTag) == 0:
4305
- lvl.SourceLineageTag = generate_guid()
4306
- for e in self.model.Expressions():
4299
+ for e in self.model.Expressions:
4307
4300
  if len(e.LineageTag) == 0:
4308
4301
  e.LineageTag = generate_guid()
4309
- if len(e.SourceLineageTag) == 0:
4310
- e.SourceLineageTag = generate_guid()
4302
+
4303
+ def add_changed_property(self, object, property: str):
4304
+ """
4305
+ Adds a `ChangedProperty <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.changedproperty.property?view=analysisservices-dotnet#microsoft-analysisservices-tabular-changedproperty-property>`_ property to a semantic model object. Only adds the property if it does not already exist for the object.
4306
+
4307
+ Parameters
4308
+ ----------
4309
+ object : TOM Object
4310
+ The TOM object within the semantic model.
4311
+ property : str
4312
+ The property to set (e.g. 'Name', 'DataType').
4313
+ """
4314
+
4315
+ import Microsoft.AnalysisServices.Tabular as TOM
4316
+
4317
+ # Only add the property if it does not already exist for that object
4318
+ if not any(c.Property == property for c in object.ChangedProperties):
4319
+ cp = TOM.ChangedProperty()
4320
+ cp.Property = property
4321
+ object.ChangedProperties.Add(cp)
4322
+
4323
+ def remove_changed_property(self, object, property: str):
4324
+ """
4325
+ Removes a `ChangedProperty <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.changedproperty.property?view=analysisservices-dotnet#microsoft-analysisservices-tabular-changedproperty-property>`_ property from a semantic model object. Only removes the property if it exists for the object.
4326
+
4327
+ Parameters
4328
+ ----------
4329
+ object : TOM Object
4330
+ The TOM object within the semantic model.
4331
+ property : str
4332
+ The property to remove (e.g. 'Name', 'DataType').
4333
+ """
4334
+
4335
+ for cp in object.ChangedProperties:
4336
+ if cp.Property == property:
4337
+ object.ChangedProperties.Remove(cp)
4311
4338
 
4312
4339
  def generate_measure_descriptions(
4313
4340
  self,
4314
4341
  measure_name: Optional[str | List[str]] = None,
4315
4342
  max_batch_size: Optional[int] = 5,
4316
- ):
4343
+ ) -> pd.DataFrame:
4317
4344
  """
4318
- Auto-generates descriptions for measures using an LLM.
4345
+ Auto-generates descriptions for measures using an LLM. This function requires a paid F-sku (Fabric) of F64 or higher.
4346
+ Setting the 'readonly' parameter in connect_semantic_model to True will allow you to see the auto-generated descriptions in a dataframe. Setting the 'readonly' parameter
4347
+ to False will update the descriptions for the measures within the 'measure_name' parameter.
4319
4348
 
4320
4349
  Parameters
4321
4350
  ----------
@@ -4324,9 +4353,21 @@ class TOMWrapper:
4324
4353
  Defaults to None which generates descriptions for all measures in the semantic model.
4325
4354
  max_batch_size : int, default=5
4326
4355
  Sets the max batch size for each API call.
4356
+
4357
+ Returns
4358
+ -------
4359
+ pandas.DataFrame
4360
+ A pandas dataframe showing the updated measure(s) and their new description(s).
4327
4361
  """
4328
4362
 
4363
+ df = pd.DataFrame(
4364
+ columns=["Table Name", "Measure Name", "Expression", "Description"]
4365
+ )
4366
+ data = []
4367
+
4329
4368
  # import concurrent.futures
4369
+ if measure_name is None:
4370
+ measure_name = [m.Name for m in self.all_measures()]
4330
4371
 
4331
4372
  if isinstance(measure_name, str):
4332
4373
  measure_name = [measure_name]
@@ -4383,11 +4424,28 @@ class TOMWrapper:
4383
4424
  if ms_name.startswith("urn: "):
4384
4425
  ms_name = ms_name[5:]
4385
4426
  desc = item.get("description")
4386
- table_name = next(
4387
- m.Parent.Name for m in self.all_measures() if m.Name == ms_name
4427
+ (table_name, expr) = next(
4428
+ (m.Parent.Name, m.Expression)
4429
+ for m in self.all_measures()
4430
+ if m.Name == ms_name
4388
4431
  )
4389
4432
  self.model.Tables[table_name].Measures[ms_name].Description = desc
4390
4433
 
4434
+ # Collect new descriptions in a dataframe
4435
+ new_data = {
4436
+ "Table Name": table_name,
4437
+ "Measure Name": ms_name,
4438
+ "Expression": expr,
4439
+ "Description": desc,
4440
+ }
4441
+
4442
+ data.append(new_data)
4443
+
4444
+ if data:
4445
+ df = pd.concat([df, pd.DataFrame(data)], ignore_index=True)
4446
+
4447
+ return df
4448
+
4391
4449
  # def process_measure(m):
4392
4450
  # table_name = m.Parent.Name
4393
4451
  # m_name = m.Name
@@ -4424,7 +4482,55 @@ class TOMWrapper:
4424
4482
  # executor.map(process_measure, self.all_measures())
4425
4483
 
4426
4484
  def close(self):
4485
+
4427
4486
  if not self._readonly and self.model is not None:
4487
+
4488
+ import Microsoft.AnalysisServices.Tabular as TOM
4489
+
4490
+ # ChangedProperty logic
4491
+ for t in self.model.Tables:
4492
+ if any(
4493
+ p.SourceType == TOM.PartitionSourceType.Entity for p in t.Partitions
4494
+ ):
4495
+ if t.LineageTag in list(self._table_map.keys()):
4496
+ if self._table_map.get(t.LineageTag) != t.Name:
4497
+ self.add_changed_property(object=t, property="Name")
4498
+
4499
+ for c in self.all_columns():
4500
+ if c.LineageTag in list(self._column_map.keys()):
4501
+ if any(
4502
+ p.SourceType == TOM.PartitionSourceType.Entity
4503
+ for p in c.Parent.Partitions
4504
+ ):
4505
+ if self._column_map.get(c.LineageTag)[0] != c.Name:
4506
+ self.add_changed_property(object=c, property="Name")
4507
+ if self._column_map.get(c.LineageTag)[1] != c.DataType:
4508
+ self.add_changed_property(object=c, property="DataType")
4509
+
4510
+ tags = [f"{icons.sll_prefix}{a}" for a in icons.sll_tags]
4511
+ tags.append("SLL")
4512
+
4513
+ if not any(a.Name == icons.sll_ann_name for a in self.model.Annotations):
4514
+ ann_list = _make_list_unique(tags)
4515
+ new_ann_value = str(ann_list).replace("'", '"')
4516
+ self.set_annotation(
4517
+ object=self.model, name=icons.sll_ann_name, value=new_ann_value
4518
+ )
4519
+ else:
4520
+ try:
4521
+ ann_value = self.get_annotation_value(
4522
+ object=self.model, name=icons.sll_ann_name
4523
+ )
4524
+ ann_list = ast.literal_eval(ann_value)
4525
+ ann_list += tags
4526
+ ann_list = _make_list_unique(ann_list)
4527
+ new_ann_value = str(ann_list).replace("'", '"')
4528
+ self.set_annotation(
4529
+ object=self.model, name=icons.sll_ann_name, value=new_ann_value
4530
+ )
4531
+ except Exception:
4532
+ pass
4533
+
4428
4534
  self.model.SaveChanges()
4429
4535
 
4430
4536
  if len(self._tables_added) > 0: