semantic-link-labs 0.8.3__py3-none-any.whl → 0.8.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (109) hide show
  1. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/METADATA +38 -8
  2. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/RECORD +109 -104
  3. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +53 -1
  5. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
  6. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
  7. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
  8. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
  9. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
  10. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
  11. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
  12. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
  13. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
  14. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
  15. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
  16. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
  17. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
  18. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
  19. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
  20. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
  21. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
  22. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
  23. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
  24. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
  25. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
  26. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
  27. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
  28. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
  29. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
  30. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
  31. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
  32. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
  33. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
  34. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
  35. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
  36. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
  37. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
  38. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
  39. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
  40. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
  41. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
  42. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
  43. sempy_labs/_capacities.py +49 -14
  44. sempy_labs/_capacity_migration.py +1 -7
  45. sempy_labs/_data_pipelines.py +6 -0
  46. sempy_labs/_dataflows.py +118 -1
  47. sempy_labs/_dax.py +189 -3
  48. sempy_labs/_deployment_pipelines.py +13 -7
  49. sempy_labs/_environments.py +6 -0
  50. sempy_labs/_eventhouses.py +6 -0
  51. sempy_labs/_eventstreams.py +6 -0
  52. sempy_labs/_external_data_shares.py +6 -4
  53. sempy_labs/_generate_semantic_model.py +26 -3
  54. sempy_labs/_git.py +14 -14
  55. sempy_labs/_helper_functions.py +197 -1
  56. sempy_labs/_icons.py +55 -22
  57. sempy_labs/_kql_databases.py +6 -0
  58. sempy_labs/_kql_querysets.py +6 -0
  59. sempy_labs/_list_functions.py +1 -1
  60. sempy_labs/_managed_private_endpoints.py +166 -0
  61. sempy_labs/_mirrored_databases.py +428 -0
  62. sempy_labs/_mirrored_warehouses.py +2 -0
  63. sempy_labs/_ml_experiments.py +6 -0
  64. sempy_labs/_ml_models.py +7 -1
  65. sempy_labs/_model_bpa.py +215 -181
  66. sempy_labs/_model_bpa_bulk.py +46 -42
  67. sempy_labs/_model_bpa_rules.py +8 -3
  68. sempy_labs/_model_dependencies.py +41 -87
  69. sempy_labs/_notebooks.py +107 -12
  70. sempy_labs/_query_scale_out.py +8 -6
  71. sempy_labs/_refresh_semantic_model.py +299 -49
  72. sempy_labs/_spark.py +12 -5
  73. sempy_labs/_translations.py +2 -0
  74. sempy_labs/_vertipaq.py +89 -86
  75. sempy_labs/_warehouses.py +79 -0
  76. sempy_labs/_workloads.py +128 -0
  77. sempy_labs/_workspace_identity.py +4 -4
  78. sempy_labs/_workspaces.py +14 -1
  79. sempy_labs/admin/__init__.py +2 -0
  80. sempy_labs/admin/_basic_functions.py +131 -43
  81. sempy_labs/admin/_domains.py +18 -18
  82. sempy_labs/directlake/__init__.py +2 -0
  83. sempy_labs/directlake/_directlake_schema_sync.py +2 -1
  84. sempy_labs/directlake/_dl_helper.py +4 -1
  85. sempy_labs/directlake/_get_shared_expression.py +7 -1
  86. sempy_labs/directlake/_guardrails.py +2 -1
  87. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -7
  88. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
  89. sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
  90. sempy_labs/directlake/_warm_cache.py +10 -9
  91. sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
  92. sempy_labs/lakehouse/_shortcuts.py +4 -0
  93. sempy_labs/migration/_create_pqt_file.py +5 -2
  94. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
  95. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
  96. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
  97. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
  98. sempy_labs/migration/_migration_validation.py +2 -0
  99. sempy_labs/migration/_refresh_calc_tables.py +1 -0
  100. sempy_labs/report/__init__.py +6 -1
  101. sempy_labs/report/_download_report.py +75 -0
  102. sempy_labs/report/_generate_report.py +6 -0
  103. sempy_labs/report/_paginated.py +74 -0
  104. sempy_labs/report/_report_functions.py +6 -0
  105. sempy_labs/report/_report_rebind.py +2 -0
  106. sempy_labs/report/_reportwrapper.py +4 -2
  107. sempy_labs/tom/_model.py +135 -68
  108. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/LICENSE +0 -0
  109. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/top_level.txt +0 -0
@@ -1332,6 +1332,7 @@ class ReportWrapper:
1332
1332
 
1333
1333
  theme_name = json_file["name"]
1334
1334
  theme_name_full = f"{theme_name}.json"
1335
+ rd = self.rdef
1335
1336
 
1336
1337
  # Add theme.json file to request_body
1337
1338
  file_payload = _conv_b64(json_file)
@@ -1345,11 +1346,12 @@ class ReportWrapper:
1345
1346
  "type": "CustomTheme",
1346
1347
  }
1347
1348
 
1348
- rd = self.rdef
1349
1349
  for _, r in rd.iterrows():
1350
1350
  path = r["path"]
1351
1351
  payload = r["payload"]
1352
- if path != report_path:
1352
+ if path == filePath:
1353
+ pass
1354
+ elif path != report_path:
1353
1355
  _add_part(request_body, path, payload)
1354
1356
  # Update the report.json file
1355
1357
  else:
sempy_labs/tom/_model.py CHANGED
@@ -6,6 +6,7 @@ from datetime import datetime
6
6
  from sempy_labs._helper_functions import (
7
7
  format_dax_object_name,
8
8
  generate_guid,
9
+ _make_list_unique,
9
10
  )
10
11
  from sempy_labs._list_functions import list_relationships
11
12
  from sempy_labs._refresh_semantic_model import refresh_semantic_model
@@ -15,6 +16,7 @@ from typing import List, Iterator, Optional, Union, TYPE_CHECKING
15
16
  from sempy._utils._log import log
16
17
  import sempy_labs._icons as icons
17
18
  from sempy.fabric.exceptions import FabricHTTPException
19
+ import ast
18
20
 
19
21
  if TYPE_CHECKING:
20
22
  import Microsoft.AnalysisServices.Tabular
@@ -49,15 +51,19 @@ class TOMWrapper:
49
51
 
50
52
  self._table_map = {}
51
53
  self._column_map = {}
52
- for t in self.model.Tables:
53
- if len(t.LineageTag) == 0:
54
- t.LineageTag = generate_guid()
55
- self._table_map[t.LineageTag] = t.Name
54
+ self._compat_level = self.model.Model.Database.CompatibilityLevel
56
55
 
57
- for c in self.all_columns():
58
- if len(c.LineageTag) == 0:
59
- c.LineageTag = generate_guid()
60
- self._column_map[c.LineageTag] = [c.Name, c.DataType]
56
+ # Minimum compat level for lineage tags is 1540 (https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.table.lineagetag?view=analysisservices-dotnet#microsoft-analysisservices-tabular-table-lineagetag)
57
+ if self._compat_level >= 1540:
58
+ for t in self.model.Tables:
59
+ if len(t.LineageTag) == 0:
60
+ t.LineageTag = generate_guid()
61
+ self._table_map[t.LineageTag] = t.Name
62
+
63
+ for c in self.all_columns():
64
+ if len(c.LineageTag) == 0:
65
+ c.LineageTag = generate_guid()
66
+ self._column_map[c.LineageTag] = [c.Name, c.DataType]
61
67
 
62
68
  def all_columns(self):
63
69
  """
@@ -885,6 +891,7 @@ class TOMWrapper:
885
891
  precedence: int,
886
892
  description: Optional[str] = None,
887
893
  hidden: bool = False,
894
+ column_name: str = "Name",
888
895
  ):
889
896
  """
890
897
  Adds a `calculation group <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.calculationgroup?view=analysisservices-dotnet>`_ to a semantic model.
@@ -899,9 +906,10 @@ class TOMWrapper:
899
906
  A description of the calculation group.
900
907
  hidden : bool, default=False
901
908
  Whether the calculation group is hidden/visible.
909
+ column_name : str, default="Name"
910
+ The name of the calculation group column.
902
911
  """
903
912
  import Microsoft.AnalysisServices.Tabular as TOM
904
- import System
905
913
 
906
914
  tbl = TOM.Table()
907
915
  tbl.Name = name
@@ -914,6 +922,7 @@ class TOMWrapper:
914
922
  part = TOM.Partition()
915
923
  part.Name = name
916
924
  part.Source = TOM.CalculationGroupSource()
925
+ part.Mode = TOM.ModeType.Import
917
926
  tbl.Partitions.Add(part)
918
927
 
919
928
  sortCol = "Ordinal"
@@ -922,14 +931,14 @@ class TOMWrapper:
922
931
  col1.Name = sortCol
923
932
  col1.SourceColumn = sortCol
924
933
  col1.IsHidden = True
925
- col1.DataType = System.Enum.Parse(TOM.DataType, "Int64")
934
+ col1.DataType = TOM.DataType.Int64
926
935
 
927
936
  tbl.Columns.Add(col1)
928
937
 
929
938
  col2 = TOM.DataColumn()
930
- col2.Name = "Name"
931
- col2.SourceColumn = "Name"
932
- col2.DataType = System.Enum.Parse(TOM.DataType, "String")
939
+ col2.Name = column_name
940
+ col2.SourceColumn = column_name
941
+ col2.DataType = TOM.DataType.String
933
942
  # col.SortByColumn = m.Tables[name].Columns[sortCol]
934
943
  tbl.Columns.Add(col2)
935
944
 
@@ -988,10 +997,13 @@ class TOMWrapper:
988
997
  """
989
998
  import Microsoft.AnalysisServices.Tabular as TOM
990
999
 
991
- cul = TOM.Culture()
992
- cul.Name = language
993
-
994
1000
  if not any(c.Name == language for c in self.model.Cultures):
1001
+ cul = TOM.Culture()
1002
+ cul.Name = language
1003
+ lm = TOM.LinguisticMetadata()
1004
+ lm.ContentType = TOM.ContentType.Json
1005
+ lm.Content = f'{{"Version": "1.0.0", "Language": "{language}"}}'
1006
+ cul.LinguisticMetadata = lm
995
1007
  self.model.Cultures.Add(cul)
996
1008
 
997
1009
  def add_perspective(self, perspective_name: str):
@@ -2109,7 +2121,9 @@ class TOMWrapper:
2109
2121
  if c.Parent.Name == table_name and c.Parent.DataCategory == "Time"
2110
2122
  )
2111
2123
 
2112
- def mark_as_date_table(self, table_name: str, column_name: str):
2124
+ def mark_as_date_table(
2125
+ self, table_name: str, column_name: str, validate: bool = False
2126
+ ):
2113
2127
  """
2114
2128
  Marks a table as a `date table <https://learn.microsoft.com/power-bi/transform-model/desktop-date-tables>`_.
2115
2129
 
@@ -2119,6 +2133,8 @@ class TOMWrapper:
2119
2133
  Name of the table.
2120
2134
  column_name : str
2121
2135
  Name of the date column in the table.
2136
+ validate : bool, default=False
2137
+ If True, performs a validation on whether the date table is viable.
2122
2138
  """
2123
2139
  import Microsoft.AnalysisServices.Tabular as TOM
2124
2140
 
@@ -2129,31 +2145,32 @@ class TOMWrapper:
2129
2145
  f"{icons.red_dot} The column specified in the 'column_name' parameter in this function must be of DateTime data type."
2130
2146
  )
2131
2147
 
2132
- daxQuery = f"""
2133
- define measure '{table_name}'[test] =
2134
- var mn = MIN('{table_name}'[{column_name}])
2135
- var ma = MAX('{table_name}'[{column_name}])
2136
- var x = COUNTROWS(DISTINCT('{table_name}'[{column_name}]))
2137
- var y = DATEDIFF(mn, ma, DAY) + 1
2138
- return if(y = x, 1,0)
2139
-
2140
- EVALUATE
2141
- SUMMARIZECOLUMNS(
2142
- "1",[test]
2143
- )
2144
- """
2145
- df = fabric.evaluate_dax(
2146
- dataset=self._dataset, workspace=self._workspace, dax_string=daxQuery
2147
- )
2148
- value = df["1"].iloc[0]
2149
- if value != "1":
2150
- raise ValueError(
2151
- f"{icons.red_dot} The '{column_name}' within the '{table_name}' table does not contain contiguous date values."
2148
+ if validate:
2149
+ dax_query = f"""
2150
+ define measure '{table_name}'[test] =
2151
+ var mn = MIN('{table_name}'[{column_name}])
2152
+ var ma = MAX('{table_name}'[{column_name}])
2153
+ var x = COUNTROWS(DISTINCT('{table_name}'[{column_name}]))
2154
+ var y = DATEDIFF(mn, ma, DAY) + 1
2155
+ return if(y = x, 1,0)
2156
+
2157
+ EVALUATE
2158
+ SUMMARIZECOLUMNS(
2159
+ "1",[test]
2160
+ )
2161
+ """
2162
+ df = fabric.evaluate_dax(
2163
+ dataset=self._dataset, workspace=self._workspace, dax_string=dax_query
2152
2164
  )
2165
+ value = df["[1]"].iloc[0]
2166
+ if value != "1":
2167
+ raise ValueError(
2168
+ f"{icons.red_dot} The '{column_name}' within the '{table_name}' table does not contain contiguous date values."
2169
+ )
2153
2170
 
2154
2171
  # Mark as a date table
2155
2172
  t.DataCategory = "Time"
2156
- c.Columns[column_name].IsKey = True
2173
+ c.IsKey = True
2157
2174
  print(
2158
2175
  f"{icons.green_dot} The '{table_name}' table has been marked as a date table using the '{column_name}' column as its primary date key."
2159
2176
  )
@@ -2261,17 +2278,21 @@ class TOMWrapper:
2261
2278
  t = self.model.Tables[table_name]
2262
2279
 
2263
2280
  return (
2264
- any(
2265
- p.SourceType == TOM.PartitionSourceType.Calculated
2266
- and "NAMEOF(" in p.Source.Expression
2267
- for p in t.Partitions
2281
+ self.is_field_parameter(table_name=table_name)
2282
+ and t.Columns.Count == 4
2283
+ and any(
2284
+ "NAMEOF(" in p.Source.Expression.replace(" ", "") for p in t.Partitions
2268
2285
  )
2269
2286
  and all(
2270
2287
  "[Value" in c.SourceColumn
2271
2288
  for c in t.Columns
2272
- if c.Type != TOM.ColumnType.RowNumber
2289
+ if c.Type == TOM.ColumnType.Data
2290
+ )
2291
+ and any(
2292
+ ep.Name == "ParameterMetadata"
2293
+ for c in t.Columns
2294
+ for ep in c.ExtendedProperties
2273
2295
  )
2274
- and t.Columns.Count == 4
2275
2296
  )
2276
2297
 
2277
2298
  def is_auto_date_table(self, table_name: str):
@@ -2692,6 +2713,7 @@ class TOMWrapper:
2692
2713
 
2693
2714
  par = TOM.Partition()
2694
2715
  par.Name = name
2716
+ par.Mode = TOM.ModeType.Import
2695
2717
 
2696
2718
  parSource = TOM.CalculatedPartitionSource()
2697
2719
  parSource.Expression = expression
@@ -2949,6 +2971,8 @@ class TOMWrapper:
2949
2971
  runId = "1"
2950
2972
  self.set_annotation(object=self.model, name="Vertipaq_Run", value=runId)
2951
2973
 
2974
+ icons.sll_tags.append("VertipaqAnnotations")
2975
+
2952
2976
  def row_count(self, object: Union["TOM.Partition", "TOM.Table"]):
2953
2977
  """
2954
2978
  Obtains the row count of a table or partition within a semantic model.
@@ -4182,15 +4206,7 @@ class TOMWrapper:
4182
4206
  Name of the column to use for sorting. Must be of integer (Int64) data type.
4183
4207
  """
4184
4208
 
4185
- import Microsoft.AnalysisServices.Tabular as TOM
4186
-
4187
4209
  sbc = self.model.Tables[table_name].Columns[sort_by_column]
4188
-
4189
- if sbc.DataType != TOM.DataType.Int64:
4190
- raise ValueError(
4191
- f"{icons.red_dot} Invalid sort by column data type. The sort by column must be of 'Int64' data type."
4192
- )
4193
-
4194
4210
  self.model.Tables[table_name].Columns[column_name].SortByColumn = sbc
4195
4211
 
4196
4212
  def remove_sort_by_column(self, table_name: str, column_name: str):
@@ -4475,30 +4491,81 @@ class TOMWrapper:
4475
4491
  # with concurrent.futures.ThreadPoolExecutor() as executor:
4476
4492
  # executor.map(process_measure, self.all_measures())
4477
4493
 
4494
+ def set_value_filter_behavior(self, value_filter_behavior: str = "Automatic"):
4495
+ """
4496
+ Sets the `Value Filter Behavior <https://learn.microsoft.com/power-bi/transform-model/value-filter-behavior>`_ property for the semantic model.
4497
+
4498
+ Parameters
4499
+ ----------
4500
+ value_filter_behavior : str , default="Automatic"
4501
+ Determines value filter behavior for SummarizeColumns. Valid options: 'Automatic', 'Independent', 'Coalesced'.
4502
+ """
4503
+
4504
+ import Microsoft.AnalysisServices.Tabular as TOM
4505
+ import System
4506
+
4507
+ value_filter_behavior = value_filter_behavior.capitalize()
4508
+ min_compat = 1606
4509
+
4510
+ if self.model.Model.Database.CompatibilityLevel < min_compat:
4511
+ self.model.Model.Database.CompatibilityLevel = min_compat
4512
+
4513
+ self.model.ValueFilterBehavior = System.Enum.Parse(
4514
+ TOM.ValueFilterBehaviorType, value_filter_behavior
4515
+ )
4516
+
4478
4517
  def close(self):
4518
+
4479
4519
  if not self._readonly and self.model is not None:
4480
4520
 
4481
4521
  import Microsoft.AnalysisServices.Tabular as TOM
4482
4522
 
4483
- # ChangedProperty logic
4484
- for t in self.model.Tables:
4485
- if any(
4486
- p.SourceType == TOM.PartitionSourceType.Entity for p in t.Partitions
4487
- ):
4488
- if t.LineageTag in list(self._table_map.keys()):
4489
- if self._table_map.get(t.LineageTag) != t.Name:
4490
- self.add_changed_property(object=t, property="Name")
4491
-
4492
- for c in self.all_columns():
4493
- if c.LineageTag in list(self._column_map.keys()):
4523
+ # ChangedProperty logic (min compat level is 1567) https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.changedproperty?view=analysisservices-dotnet
4524
+ if self.model.Model.Database.CompatibilityLevel >= 1567:
4525
+ for t in self.model.Tables:
4494
4526
  if any(
4495
4527
  p.SourceType == TOM.PartitionSourceType.Entity
4496
- for p in c.Parent.Partitions
4528
+ for p in t.Partitions
4497
4529
  ):
4498
- if self._column_map.get(c.LineageTag)[0] != c.Name:
4499
- self.add_changed_property(object=c, property="Name")
4500
- if self._column_map.get(c.LineageTag)[1] != c.DataType:
4501
- self.add_changed_property(object=c, property="DataType")
4530
+ if t.LineageTag in list(self._table_map.keys()):
4531
+ if self._table_map.get(t.LineageTag) != t.Name:
4532
+ self.add_changed_property(object=t, property="Name")
4533
+
4534
+ for c in self.all_columns():
4535
+ if c.LineageTag in list(self._column_map.keys()):
4536
+ if any(
4537
+ p.SourceType == TOM.PartitionSourceType.Entity
4538
+ for p in c.Parent.Partitions
4539
+ ):
4540
+ if self._column_map.get(c.LineageTag)[0] != c.Name:
4541
+ self.add_changed_property(object=c, property="Name")
4542
+ if self._column_map.get(c.LineageTag)[1] != c.DataType:
4543
+ self.add_changed_property(object=c, property="DataType")
4544
+
4545
+ # SLL Tags
4546
+ tags = [f"{icons.sll_prefix}{a}" for a in icons.sll_tags]
4547
+ tags.append("SLL")
4548
+
4549
+ if not any(a.Name == icons.sll_ann_name for a in self.model.Annotations):
4550
+ ann_list = _make_list_unique(tags)
4551
+ new_ann_value = str(ann_list).replace("'", '"')
4552
+ self.set_annotation(
4553
+ object=self.model, name=icons.sll_ann_name, value=new_ann_value
4554
+ )
4555
+ else:
4556
+ try:
4557
+ ann_value = self.get_annotation_value(
4558
+ object=self.model, name=icons.sll_ann_name
4559
+ )
4560
+ ann_list = ast.literal_eval(ann_value)
4561
+ ann_list += tags
4562
+ ann_list = _make_list_unique(ann_list)
4563
+ new_ann_value = str(ann_list).replace("'", '"')
4564
+ self.set_annotation(
4565
+ object=self.model, name=icons.sll_ann_name, value=new_ann_value
4566
+ )
4567
+ except Exception:
4568
+ pass
4502
4569
 
4503
4570
  self.model.SaveChanges()
4504
4571