semantic-link-labs 0.4.1 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic.

Files changed (52)
  1. semantic_link_labs-0.4.1.dist-info/LICENSE +21 -0
  2. semantic_link_labs-0.4.1.dist-info/METADATA +22 -0
  3. semantic_link_labs-0.4.1.dist-info/RECORD +52 -0
  4. semantic_link_labs-0.4.1.dist-info/WHEEL +5 -0
  5. semantic_link_labs-0.4.1.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +154 -0
  7. sempy_labs/_ai.py +496 -0
  8. sempy_labs/_clear_cache.py +39 -0
  9. sempy_labs/_connections.py +234 -0
  10. sempy_labs/_dax.py +70 -0
  11. sempy_labs/_generate_semantic_model.py +280 -0
  12. sempy_labs/_helper_functions.py +506 -0
  13. sempy_labs/_icons.py +4 -0
  14. sempy_labs/_list_functions.py +1372 -0
  15. sempy_labs/_model_auto_build.py +143 -0
  16. sempy_labs/_model_bpa.py +1354 -0
  17. sempy_labs/_model_dependencies.py +341 -0
  18. sempy_labs/_one_lake_integration.py +155 -0
  19. sempy_labs/_query_scale_out.py +447 -0
  20. sempy_labs/_refresh_semantic_model.py +184 -0
  21. sempy_labs/_tom.py +3766 -0
  22. sempy_labs/_translations.py +378 -0
  23. sempy_labs/_vertipaq.py +893 -0
  24. sempy_labs/directlake/__init__.py +45 -0
  25. sempy_labs/directlake/_directlake_schema_compare.py +110 -0
  26. sempy_labs/directlake/_directlake_schema_sync.py +128 -0
  27. sempy_labs/directlake/_fallback.py +62 -0
  28. sempy_labs/directlake/_get_directlake_lakehouse.py +69 -0
  29. sempy_labs/directlake/_get_shared_expression.py +59 -0
  30. sempy_labs/directlake/_guardrails.py +84 -0
  31. sempy_labs/directlake/_list_directlake_model_calc_tables.py +54 -0
  32. sempy_labs/directlake/_show_unsupported_directlake_objects.py +89 -0
  33. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +81 -0
  34. sempy_labs/directlake/_update_directlake_partition_entity.py +64 -0
  35. sempy_labs/directlake/_warm_cache.py +210 -0
  36. sempy_labs/lakehouse/__init__.py +24 -0
  37. sempy_labs/lakehouse/_get_lakehouse_columns.py +81 -0
  38. sempy_labs/lakehouse/_get_lakehouse_tables.py +250 -0
  39. sempy_labs/lakehouse/_lakehouse.py +85 -0
  40. sempy_labs/lakehouse/_shortcuts.py +296 -0
  41. sempy_labs/migration/__init__.py +29 -0
  42. sempy_labs/migration/_create_pqt_file.py +239 -0
  43. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +429 -0
  44. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +150 -0
  45. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +524 -0
  46. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +165 -0
  47. sempy_labs/migration/_migration_validation.py +227 -0
  48. sempy_labs/migration/_refresh_calc_tables.py +129 -0
  49. sempy_labs/report/__init__.py +35 -0
  50. sempy_labs/report/_generate_report.py +253 -0
  51. sempy_labs/report/_report_functions.py +855 -0
  52. sempy_labs/report/_report_rebind.py +131 -0
sempy_labs/_tom.py ADDED
@@ -0,0 +1,3766 @@
1
+ import sempy
2
+ import sempy.fabric as fabric
3
+ import pandas as pd
4
+ import re
5
+ from datetime import datetime
6
+ from sempy_labs._helper_functions import format_dax_object_name
7
+ from sempy_labs._list_functions import list_relationships
8
+ from sempy_labs._refresh_semantic_model import refresh_semantic_model
9
+ from sempy_labs.directlake._fallback import check_fallback_reason
10
+ from contextlib import contextmanager
11
+ from typing import List, Optional, Union, TYPE_CHECKING
12
+ from sempy._utils._log import log
13
+ import sempy_labs._icons as icons
14
+
15
+ if TYPE_CHECKING:
16
+ import Microsoft.AnalysisServices.Tabular
17
+ import Microsoft.AnalysisServices.Tabular as TOM
18
+
19
+
20
+ checked = "\u2611"
21
+ unchecked = "\u2610"
22
+ start_bold = "\033[1m"
23
+ end_bold = "\033[0m"
24
+
25
+
26
+ class TOMWrapper:
27
+ """
28
+ Convenience wrapper around the TOM object model for a semantic model. Always use the connect_semantic_model function to ensure that the TOM object is initialized correctly.
29
+ """
30
+
31
+ dataset: str
32
+ workspace: str
33
+ readonly: bool
34
+ tables_added: List[str]
35
+
36
+ def __init__(self, dataset, workspace, readonly):
37
+ self.dataset = dataset
38
+ self.workspace = workspace
39
+ self.readonly = readonly
40
+ self.tables_added = []
41
+
42
+ self.tom_server = fabric.create_tom_server(
43
+ readonly=readonly, workspace=workspace
44
+ )
45
+ self.model = self.tom_server.Databases.GetByName(dataset).Model
46
+
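A minimal usage sketch (the workspace and dataset names are placeholders; connect_semantic_model is the context manager referenced in the class docstring and is assumed to mirror the constructor parameters shown above):

    with connect_semantic_model(dataset="Sales Model", workspace="My Workspace", readonly=True) as tom:
        # iterate every non-row-number column in the model
        for c in tom.all_columns():
            print(c.Parent.Name, c.Name, c.DataType)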
47
+ def all_columns(self):
48
+ """
49
+ Outputs a list of all columns within all tables in the semantic model.
50
+
51
+ Parameters
52
+ ----------
53
+
54
+ Returns
55
+ -------
56
+ Iterator[Microsoft.AnalysisServices.Tabular.Column]
57
+ All columns within the semantic model.
58
+ """
59
+ import Microsoft.AnalysisServices.Tabular as TOM
60
+
61
+ for t in self.model.Tables:
62
+ for c in t.Columns:
63
+ if c.Type != TOM.ColumnType.RowNumber:
64
+ yield c
65
+
66
+ def all_calculated_columns(self):
67
+ """
68
+ Outputs a list of all calculated columns within all tables in the semantic model.
69
+
70
+ Parameters
71
+ ----------
72
+
73
+ Returns
74
+ -------
75
+ Iterator[Microsoft.AnalysisServices.Tabular.Column]
76
+ All calculated columns within the semantic model.
77
+ """
78
+ import Microsoft.AnalysisServices.Tabular as TOM
79
+
80
+ for t in self.model.Tables:
81
+ for c in t.Columns:
82
+ if c.Type == TOM.ColumnType.Calculated:
83
+ yield c
84
+
85
+ def all_calculated_tables(self):
86
+ """
87
+ Outputs a list of all calculated tables in the semantic model.
88
+
89
+ Parameters
90
+ ----------
91
+
92
+ Returns
93
+ -------
94
+ Iterator[Microsoft.AnalysisServices.Tabular.Table]
95
+ All calculated tables within the semantic model.
96
+ """
97
+ import Microsoft.AnalysisServices.Tabular as TOM
98
+
99
+ for t in self.model.Tables:
100
+ if any(p.SourceType == TOM.PartitionSourceType.Calculated for p in t.Partitions):
101
+ yield t
102
+
103
+ def all_calculation_groups(self):
104
+ """
105
+ Outputs a list of all calculation groups in the semantic model.
106
+
107
+ Parameters
108
+ ----------
109
+
110
+ Returns
111
+ -------
112
+ Iterator[Microsoft.AnalysisServices.Tabular.Table]
113
+ All calculation groups within the semantic model.
114
+ """
115
+
116
+ for t in self.model.Tables:
117
+ if t.CalculationGroup is not None:
118
+ yield t
119
+
120
+ def all_measures(self):
121
+ """
122
+ Outputs a list of all measures in the semantic model.
123
+
124
+ Parameters
125
+ ----------
126
+
127
+ Returns
128
+ -------
129
+ Iterator[Microsoft.AnalysisServices.Tabular.Measure]
130
+ All measures within the semantic model.
131
+ """
132
+
133
+ for t in self.model.Tables:
134
+ for m in t.Measures:
135
+ yield m
136
+
137
+ def all_partitions(self):
138
+ """
139
+ Outputs a list of all partitions in the semantic model.
140
+
141
+ Parameters
142
+ ----------
143
+
144
+ Returns
145
+ -------
146
+ Iterator[Microsoft.AnalysisServices.Tabular.Partition]
147
+ All partitions within the semantic model.
148
+ """
149
+
150
+ for t in self.model.Tables:
151
+ for p in t.Partitions:
152
+ yield p
153
+
154
+ def all_hierarchies(self):
155
+ """
156
+ Outputs a list of all hierarchies in the semantic model.
157
+
158
+ Parameters
159
+ ----------
160
+
161
+ Returns
162
+ -------
163
+ Iterator[Microsoft.AnalysisServices.Tabular.Hierarchy]
164
+ All hierarchies within the semantic model.
165
+ """
166
+
167
+ for t in self.model.Tables:
168
+ for h in t.Hierarchies:
169
+ yield h
170
+
171
+ def all_levels(self):
172
+ """
173
+ Outputs a list of all levels in the semantic model.
174
+
175
+ Parameters
176
+ ----------
177
+
178
+ Returns
179
+ -------
180
+ Iterator[Microsoft.AnalysisServices.Tabular.Level]
181
+ All levels within the semantic model.
182
+ """
183
+
184
+ for t in self.model.Tables:
185
+ for h in t.Hierarchies:
186
+ for l in h.Levels:
187
+ yield l
188
+
189
+ def all_calculation_items(self):
190
+ """
191
+ Outputs a list of all calculation items in the semantic model.
192
+
193
+ Parameters
194
+ ----------
195
+
196
+ Returns
197
+ -------
198
+ Iterator[Microsoft.AnalysisServices.Tabular.CalculationItem]
199
+ All calculation items within the semantic model.
200
+ """
201
+
202
+ for t in self.model.Tables:
203
+ if t.CalculationGroup is not None:
204
+ for ci in t.CalculationGroup.CalculationItems:
205
+ yield ci
206
+
207
+ def all_rls(self):
208
+ """
209
+ Outputs a list of all row level security expressions in the semantic model.
210
+
211
+ Parameters
212
+ ----------
213
+
214
+ Returns
215
+ -------
216
+ Iterator[Microsoft.AnalysisServices.Tabular.TablePermission]
217
+ All row level security expressions within the semantic model.
218
+ """
219
+
220
+ for r in self.model.Roles:
221
+ for tp in r.TablePermissions:
222
+ yield tp
223
+
224
+ def add_measure(
225
+ self,
226
+ table_name: str,
227
+ measure_name: str,
228
+ expression: str,
229
+ format_string: Optional[str] = None,
230
+ hidden: Optional[bool] = False,
231
+ description: Optional[str] = None,
232
+ display_folder: Optional[str] = None,
233
+ ):
234
+ """
235
+ Adds a measure to the semantic model.
236
+
237
+ Parameters
238
+ ----------
239
+ table_name : str
240
+ Name of the table in which the measure will be created.
241
+ measure_name : str
242
+ Name of the measure.
243
+ expression : str
244
+ DAX expression of the measure.
245
+ format_string : str, default=None
246
+ Format string of the measure.
247
+ hidden : bool, default=False
248
+ Whether the measure will be hidden or visible.
249
+ description : str, default=None
250
+ A description of the measure.
251
+ display_folder : str, default=None
252
+ The display folder in which the measure will reside.
253
+ """
254
+ import Microsoft.AnalysisServices.Tabular as TOM
255
+
256
+ obj = TOM.Measure()
257
+ obj.Name = measure_name
258
+ obj.Expression = expression
259
+ obj.IsHidden = hidden
260
+ if format_string is not None:
261
+ obj.FormatString = format_string
262
+ if description is not None:
263
+ obj.Description = description
264
+ if display_folder is not None:
265
+ obj.DisplayFolder = display_folder
266
+
267
+ self.model.Tables[table_name].Measures.Add(obj)
268
+
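For example, under the same connection pattern (the table name, measure name and DAX expression are placeholders), a measure could be added like this:

    with connect_semantic_model(dataset="Sales Model", workspace="My Workspace", readonly=False) as tom:
        tom.add_measure(
            table_name="Sales",
            measure_name="Total Sales Amount",
            expression="SUM(Sales[SalesAmount])",
            format_string="#,0",
        )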
269
+ def add_calculated_table_column(
270
+ self,
271
+ table_name: str,
272
+ column_name: str,
273
+ source_column: str,
274
+ data_type: str,
275
+ format_string: Optional[str] = None,
276
+ hidden: Optional[bool] = False,
277
+ description: Optional[str] = None,
278
+ display_folder: Optional[str] = None,
279
+ data_category: Optional[str] = None,
280
+ key: Optional[bool] = False,
281
+ summarize_by: Optional[str] = None,
282
+ ):
283
+ """
284
+ Adds a calculated table column to a calculated table within a semantic model.
285
+
286
+ Parameters
287
+ ----------
288
+ table_name : str
289
+ Name of the table in which the column will be created.
290
+ column_name : str
291
+ Name of the column.
292
+ source_column : str
293
+ The source column for the column.
294
+ data_type : str
295
+ The data type of the column.
296
+ format_string : str, default=None
297
+ Format string of the column.
298
+ hidden : bool, default=False
299
+ Whether the column will be hidden or visible.
300
+ description : str, default=None
301
+ A description of the column.
302
+ display_folder : str, default=None
303
+ The display folder in which the column will reside.
304
+ data_category : str, default=None
305
+ The data category of the column.
306
+ key : bool, default=False
307
+ Marks the column as the primary key of the table.
308
+ summarize_by : str, default=None
309
+ Sets the value for the Summarize By property of the column.
310
+ Defaults to None which resolves to 'Default'.
311
+ """
312
+ import Microsoft.AnalysisServices.Tabular as TOM
313
+ import System
314
+
315
+ data_type = (
316
+ data_type.capitalize()
317
+ .replace("Integer", "Int64")
318
+ .replace("Datetime", "DateTime")
319
+ )
320
+ if summarize_by is None:
321
+ summarize_by = "Default"
322
+ summarize_by = (
323
+ summarize_by.capitalize()
324
+ .replace("Distinctcount", "DistinctCount")
325
+ .replace("Avg", "Average")
326
+ )
327
+
328
+ obj = TOM.CalculatedTableColumn()
329
+ obj.Name = column_name
330
+ obj.SourceColumn = source_column
331
+ obj.DataType = System.Enum.Parse(TOM.DataType, data_type)
332
+ obj.IsHidden = hidden
333
+ obj.IsKey = key
334
+ obj.SummarizeBy = System.Enum.Parse(TOM.AggregateFunction, summarize_by)
335
+ if format_string is not None:
336
+ obj.FormatString = format_string
337
+ if description is not None:
338
+ obj.Description = description
339
+ if display_folder is not None:
340
+ obj.DisplayFolder = display_folder
341
+ if data_category is not None:
342
+ obj.DataCategory = data_category
343
+ self.model.Tables[table_name].Columns.Add(obj)
344
+
345
+ def add_data_column(
346
+ self,
347
+ table_name: str,
348
+ column_name: str,
349
+ source_column: str,
350
+ data_type: str,
351
+ format_string: Optional[str] = None,
352
+ hidden: Optional[bool] = False,
353
+ description: Optional[str] = None,
354
+ display_folder: Optional[str] = None,
355
+ data_category: Optional[str] = None,
356
+ key: Optional[bool] = False,
357
+ summarize_by: Optional[str] = None,
358
+ ):
359
+ """
360
+ Adds a data column to a table within a semantic model.
361
+
362
+ Parameters
363
+ ----------
364
+ table_name : str
365
+ Name of the table in which the column will be created.
366
+ column_name : str
367
+ Name of the column.
368
+ source_column : str
369
+ The source column for the column.
370
+ data_type : str
371
+ The data type of the column.
372
+ format_string : str, default=None
373
+ Format string of the column.
374
+ hidden : bool, default=False
375
+ Whether the column will be hidden or visible.
376
+ description : str, default=None
377
+ A description of the column.
378
+ display_folder : str, default=None
379
+ The display folder in which the column will reside.
380
+ data_category : str, default=None
381
+ The data category of the column.
382
+ key : bool, default=False
383
+ Marks the column as the primary key of the table.
384
+ summarize_by : str, default=None
385
+ Sets the value for the Summarize By property of the column.
386
+ Defaults to None which resolves to 'Default'.
387
+ """
388
+ import Microsoft.AnalysisServices.Tabular as TOM
389
+ import System
390
+
391
+ data_type = (
392
+ data_type.capitalize()
393
+ .replace("Integer", "Int64")
394
+ .replace("Datetime", "DateTime")
395
+ )
396
+ if summarize_by is None:
397
+ summarize_by = "Default"
398
+ summarize_by = (
399
+ summarize_by.capitalize()
400
+ .replace("Distinctcount", "DistinctCount")
401
+ .replace("Avg", "Average")
402
+ )
403
+
404
+ obj = TOM.DataColumn()
405
+ obj.Name = column_name
406
+ obj.SourceColumn = source_column
407
+ obj.DataType = System.Enum.Parse(TOM.DataType, data_type)
408
+ obj.IsHidden = hidden
409
+ obj.IsKey = key
410
+ obj.SummarizeBy = System.Enum.Parse(TOM.AggregateFunction, summarize_by)
411
+ if format_string is not None:
412
+ obj.FormatString = format_string
413
+ if description is not None:
414
+ obj.Description = description
415
+ if display_folder is not None:
416
+ obj.DisplayFolder = display_folder
417
+ if data_category is not None:
418
+ obj.DataCategory = data_category
419
+ self.model.Tables[table_name].Columns.Add(obj)
420
+
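A short sketch of add_data_column (placeholder names; tom is a writable TOMWrapper opened as in the earlier sketch). Note that data_type values such as 'Integer' and 'Datetime' are normalized to the TOM enum names 'Int64' and 'DateTime':

    tom.add_data_column(
        table_name="Sales",
        column_name="Quantity",
        source_column="Quantity",
        data_type="Integer",
        summarize_by="Sum",
    )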
421
+ def add_calculated_column(
422
+ self,
423
+ table_name: str,
424
+ column_name: str,
425
+ expression: str,
426
+ data_type: str,
427
+ format_string: Optional[str] = None,
428
+ hidden: Optional[bool] = False,
429
+ description: Optional[str] = None,
430
+ display_folder: Optional[str] = None,
431
+ data_category: Optional[str] = None,
432
+ key: Optional[bool] = False,
433
+ summarize_by: Optional[str] = None,
434
+ ):
435
+ """
436
+ Adds a calculated column to a table within a semantic model.
437
+
438
+ Parameters
439
+ ----------
440
+ table_name : str
441
+ Name of the table in which the column will be created.
442
+ column_name : str
443
+ Name of the column.
444
+ expression : str
445
+ The DAX expression for the column.
446
+ data_type : str
447
+ The data type of the column.
448
+ format_string : str, default=None
449
+ Format string of the column.
450
+ hidden : bool, default=False
451
+ Whether the column will be hidden or visible.
452
+ description : str, default=None
453
+ A description of the column.
454
+ display_folder : str, default=None
455
+ The display folder in which the column will reside.
456
+ data_category : str, default=None
457
+ The data category of the column.
458
+ key : bool, default=False
459
+ Marks the column as the primary key of the table.
460
+ summarize_by : str, default=None
461
+ Sets the value for the Summarize By property of the column.
462
+ Defaults to None which resolves to 'Default'.
463
+ """
464
+ import Microsoft.AnalysisServices.Tabular as TOM
465
+ import System
466
+
467
+ data_type = (
468
+ data_type.capitalize()
469
+ .replace("Integer", "Int64")
470
+ .replace("Datetime", "DateTime")
471
+ )
472
+ if summarize_by is None:
473
+ summarize_by = "Default"
474
+ summarize_by = (
475
+ summarize_by.capitalize()
476
+ .replace("Distinctcount", "DistinctCount")
477
+ .replace("Avg", "Average")
478
+ )
479
+
480
+ obj = TOM.CalculatedColumn()
481
+ obj.Name = column_name
482
+ obj.Expression = expression
483
+ obj.IsHidden = hidden
484
+ obj.DataType = System.Enum.Parse(TOM.DataType, data_type)
485
+ obj.IsKey = key
486
+ obj.SummarizeBy = System.Enum.Parse(TOM.AggregateFunction, summarize_by)
487
+ if format_string is not None:
488
+ obj.FormatString = format_string
489
+ if description is not None:
490
+ obj.Description = description
491
+ if display_folder is not None:
492
+ obj.DisplayFolder = display_folder
493
+ if data_category is not None:
494
+ obj.DataCategory = data_category
495
+ self.model.Tables[table_name].Columns.Add(obj)
496
+
497
+ def add_calculation_item(
498
+ self,
499
+ table_name: str,
500
+ calculation_item_name: str,
501
+ expression: str,
502
+ ordinal: Optional[int] = None,
503
+ format_string_expression: Optional[str] = None,
504
+ description: Optional[str] = None,
505
+ ):
506
+ """
507
+ Adds a calculation item to a calculation group within a semantic model.
508
+
509
+ Parameters
510
+ ----------
511
+ table_name : str
512
+ Name of the table in which the calculation item will be created.
513
+ calculation_item_name : str
514
+ Name of the calculation item.
515
+ expression : str
516
+ The DAX expression for the calculation item.
517
+ ordinal : int, default=None
518
+ The ordinal of the calculation item.
519
+ format_string_expression : str, default=None
520
+ The format string expression for the calculation item.
521
+ description : str, default=None
522
+ A description of the calculation item.
523
+ """
524
+ import Microsoft.AnalysisServices.Tabular as TOM
525
+
526
+ obj = TOM.CalculationItem()
527
+ fsd = TOM.FormatStringDefinition()
528
+ obj.Name = calculation_item_name
529
+ obj.Expression = expression
530
+ if ordinal is not None:
531
+ obj.Ordinal = ordinal
532
+ if description is not None:
533
+ obj.Description = description
534
+ if format_string_expression is not None:
535
+ fsd.Expression = format_string_expression
+ obj.FormatStringDefinition = fsd
536
+ self.model.Tables[table_name].CalculationGroup.CalculationItems.Add(obj)
537
+
538
+ def add_role(
539
+ self,
540
+ role_name: str,
541
+ model_permission: Optional[str] = None,
542
+ description: Optional[str] = None,
543
+ ):
544
+ """
545
+ Adds a role to a semantic model.
546
+
547
+ Parameters
548
+ ----------
549
+ role_name : str
550
+ Name of the role.
551
+ model_permission : str, default=None
552
+ The model permission for the role.
553
+ Defaults to None which resolves to 'Read'.
554
+ description : str, default=None
555
+ A description of the role.
556
+ """
557
+ import Microsoft.AnalysisServices.Tabular as TOM
558
+ import System
559
+
560
+ if model_permission is None:
561
+ model_permission = "Read"
562
+
563
+ obj = TOM.ModelRole()
564
+ obj.Name = role_name
565
+ obj.ModelPermission = System.Enum.Parse(TOM.ModelPermission, model_permission)
566
+ if description is not None:
567
+ obj.Description = description
568
+ self.model.Roles.Add(obj)
569
+
570
+ def set_rls(self, role_name: str, table_name: str, filter_expression: str):
571
+ """
572
+ Sets the row level security permissions for a table within a role.
573
+
574
+ Parameters
575
+ ----------
576
+ role_name : str
577
+ Name of the role.
578
+ table_name : str
579
+ Name of the table.
580
+ filter_expression : str
581
+ The DAX expression containing the row level security filter expression logic.
582
+ """
583
+ import Microsoft.AnalysisServices.Tabular as TOM
584
+
585
+ tp = TOM.TablePermission()
586
+ tp.Table = self.model.Tables[table_name]
587
+ tp.FilterExpression = filter_expression
588
+
589
+ try:
590
+ self.model.Roles[role_name].TablePermissions[
591
+ table_name
592
+ ].FilterExpression = filter_expression
593
+ except:
594
+ self.model.Roles[role_name].TablePermissions.Add(tp)
595
+
596
+ def set_ols(
597
+ self, role_name: str, table_name: str, column_name: str, permission: str
598
+ ):
599
+ """
600
+ Sets the object level security permissions for a column within a role.
601
+
602
+ Parameters
603
+ ----------
604
+ role_name : str
605
+ Name of the role.
606
+ table_name : str
607
+ Name of the table.
608
+ column_name : str
609
+ Name of the column.
610
+ permission : str
611
+ The object level security permission for the column.
612
+ `Permission valid values <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.metadatapermission?view=analysisservices-dotnet>`_
613
+ """
614
+ import Microsoft.AnalysisServices.Tabular as TOM
615
+ import System
616
+
617
+ permission = permission.capitalize()
618
+
619
+ if permission not in ["Read", "None", "Default"]:
620
+ print(f"ERROR! Invalid 'permission' value.")
621
+ return
622
+
623
+ cp = TOM.ColumnPermission()
624
+ cp.Column = self.model.Tables[table_name].Columns[column_name]
625
+ cp.MetadataPermission = System.Enum.Parse(TOM.MetadataPermission, permission)
626
+ try:
627
+ self.model.Roles[role_name].TablePermissions[table_name].ColumnPermissions[
628
+ column_name
629
+ ].MetadataPermission = System.Enum.Parse(TOM.MetadataPermission, permission)
630
+ except:
631
+ self.model.Roles[role_name].TablePermissions[
632
+ table_name
633
+ ].ColumnPermissions.Add(cp)
634
+
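A combined sketch of the security helpers above (role, table and column names are placeholders; tom is a writable TOMWrapper as in the earlier sketches):

    tom.add_role(role_name="Regional Viewer", model_permission="Read")
    tom.set_rls(
        role_name="Regional Viewer",
        table_name="Sales",
        filter_expression='Sales[Region] = "West"',
    )
    tom.set_ols(
        role_name="Regional Viewer",
        table_name="Sales",
        column_name="Cost",
        permission="None",
    )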
635
+ def add_hierarchy(
636
+ self,
637
+ table_name: str,
638
+ hierarchy_name: str,
639
+ columns: List[str],
640
+ levels: Optional[List[str]] = None,
641
+ hierarchy_description: Optional[str] = None,
642
+ hierarchy_hidden: Optional[bool] = False,
643
+ ):
644
+ """
645
+ Adds a hierarchy to a table within a semantic model.
646
+
647
+ Parameters
648
+ ----------
649
+ table_name : str
650
+ Name of the table.
651
+ hierarchy_name : str
652
+ Name of the hierarchy.
653
+ columns : List[str]
654
+ Names of the columns to use within the hierarchy.
655
+ levels : List[str], default=None
656
+ Names of the levels to use within the hierarchy (instead of the column names).
657
+ hierarchy_description : str, default=None
658
+ A description of the hierarchy.
659
+ hierarchy_hidden : bool, default=False
660
+ Whether the hierarchy is visible or hidden.
661
+ """
662
+ import Microsoft.AnalysisServices.Tabular as TOM
663
+
664
+ if isinstance(columns, str):
665
+ print(
666
+ f"The 'levels' parameter must be a list. For example: ['Continent', 'Country', 'City']"
667
+ )
668
+ return
669
+ if len(columns) == 1:
670
+ print(f"There must be at least 2 levels in order to create a hierarchy.")
671
+ return
672
+
673
+ if levels is None:
674
+ levels = columns
675
+
676
+ if len(columns) != len(levels):
677
+ print(
678
+ f"If specifying level names, you must specify a level for each column."
679
+ )
680
+ return
681
+
682
+ obj = TOM.Hierarchy()
683
+ obj.Name = hierarchy_name
684
+ obj.IsHidden = hierarchy_hidden
685
+ if hierarchy_description is not None:
686
+ obj.Description = hierarchy_description
687
+ self.model.Tables[table_name].Hierarchies.Add(obj)
688
+
689
+ for col in columns:
690
+ lvl = TOM.Level()
691
+ lvl.Column = self.model.Tables[table_name].Columns[col]
692
+ lvl.Name = levels[columns.index(col)]
693
+ lvl.Ordinal = columns.index(col)
694
+ self.model.Tables[table_name].Hierarchies[hierarchy_name].Levels.Add(lvl)
695
+
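For instance (placeholder table and column names), a three-level hierarchy could be created as:

    tom.add_hierarchy(
        table_name="Geography",
        hierarchy_name="Geo Hierarchy",
        columns=["Continent", "Country", "City"],
    )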
696
+ def add_relationship(
697
+ self,
698
+ from_table: str,
699
+ from_column: str,
700
+ to_table: str,
701
+ to_column: str,
702
+ from_cardinality: str,
703
+ to_cardinality: str,
704
+ cross_filtering_behavior: Optional[str] = None,
705
+ is_active: Optional[bool] = True,
706
+ security_filtering_behavior: Optional[str] = None,
707
+ rely_on_referential_integrity: Optional[bool] = False,
708
+ ):
709
+ """
710
+ Adds a relationship to a semantic model.
711
+
712
+ Parameters
713
+ ----------
714
+ from_table : str
715
+ Name of the table on the 'from' side of the relationship.
716
+ from_column : str
717
+ Name of the column on the 'from' side of the relationship.
718
+ to_table : str
719
+ Name of the table on the 'to' side of the relationship.
720
+ to_column : str
721
+ Name of the column on the 'to' side of the relationship.
722
+ from_cardinality : str
723
+ The cardinality of the 'from' side of the relationship. Options: ['Many', 'One', 'None'].
724
+ to_cardinality : str
725
+ The cardinality of the 'to' side of the relationship. Options: ['Many', 'One', 'None'].
726
+ cross_filtering_behavior : str, default=None
727
+ Setting for the cross filtering behavior of the relationship. Options: ('Automatic', 'OneDirection', 'BothDirections').
728
+ Defaults to None which resolves to 'Automatic'.
729
+ is_active : bool, default=True
730
+ Setting for whether the relationship is active or not.
731
+ security_filtering_behavior : str, default=None
732
+ Setting for the security filtering behavior of the relationship. Options: ('None', 'OneDirection', 'BothDirections').
733
+ Defaults to None which resolves to 'OneDirection'.
734
+ rely_on_referential_integrity : bool, default=False
735
+ Setting for the rely on referential integrity of the relationship.
736
+ """
737
+ import Microsoft.AnalysisServices.Tabular as TOM
738
+ import System
739
+
740
+ if cross_filtering_behavior is None:
741
+ cross_filtering_behavior = "Automatic"
742
+ if security_filtering_behavior is None:
743
+ security_filtering_behavior = "OneDirection"
744
+
745
+ from_cardinality = from_cardinality.capitalize()
746
+ to_cardinality = to_cardinality.capitalize()
747
+ cross_filtering_behavior = cross_filtering_behavior.capitalize()
748
+ security_filtering_behavior = security_filtering_behavior.capitalize()
749
+ security_filtering_behavior = security_filtering_behavior.replace(
750
+ "direct", "Direct"
751
+ )
752
+ cross_filtering_behavior = cross_filtering_behavior.replace("direct", "Direct")
753
+
754
+ rel = TOM.SingleColumnRelationship()
755
+ rel.FromColumn = self.model.Tables[from_table].Columns[from_column]
756
+ rel.FromCardinality = System.Enum.Parse(
757
+ TOM.RelationshipEndCardinality, from_cardinality
758
+ )
759
+ rel.ToColumn = self.model.Tables[to_table].Columns[to_column]
760
+ rel.ToCardinality = System.Enum.Parse(
761
+ TOM.RelationshipEndCardinality, to_cardinality
762
+ )
763
+ rel.IsActive = is_active
764
+ rel.CrossFilteringBehavior = System.Enum.Parse(
765
+ TOM.CrossFilteringBehavior, cross_filtering_behavior
766
+ )
767
+ rel.SecurityFilteringBehavior = System.Enum.Parse(
768
+ TOM.SecurityFilteringBehavior, security_filtering_behavior
769
+ )
770
+ rel.RelyOnReferentialIntegrity = rely_on_referential_integrity
771
+
772
+ self.model.Relationships.Add(rel)
773
+
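A sketch of a typical many-to-one relationship (placeholder table and column names):

    tom.add_relationship(
        from_table="Sales",
        from_column="ProductKey",
        to_table="Product",
        to_column="ProductKey",
        from_cardinality="Many",
        to_cardinality="One",
    )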
774
+ def add_calculation_group(
775
+ self,
776
+ name: str,
777
+ precedence: int,
778
+ description: Optional[str] = None,
779
+ hidden: Optional[bool] = False,
780
+ ):
781
+ """
782
+ Adds a calculation group to a semantic model.
783
+
784
+ Parameters
785
+ ----------
786
+ name : str
787
+ Name of the calculation group.
788
+ precedence : int
789
+ The precedence of the calculation group.
790
+ description : str, default=None
791
+ A description of the calculation group.
792
+ hidden : bool, default=False
793
+ Whether the calculation group is hidden/visible.
794
+ """
795
+ import Microsoft.AnalysisServices.Tabular as TOM
796
+ import System
797
+
798
+ tbl = TOM.Table()
799
+ tbl.Name = name
800
+ tbl.CalculationGroup = TOM.CalculationGroup()
801
+ tbl.CalculationGroup.Precedence = precedence
802
+ tbl.IsHidden = hidden
803
+ if description is not None:
804
+ tbl.Description = description
805
+
806
+ part = TOM.Partition()
807
+ part.Name = name
808
+ part.Source = TOM.CalculationGroupSource()
809
+ tbl.Partitions.Add(part)
810
+
811
+ sortCol = "Ordinal"
812
+
813
+ col1 = TOM.DataColumn()
814
+ col1.Name = sortCol
815
+ col1.SourceColumn = sortCol
816
+ col1.IsHidden = True
817
+ col1.DataType = System.Enum.Parse(TOM.DataType, "Int64")
818
+
819
+ tbl.Columns.Add(col1)
820
+
821
+ col2 = TOM.DataColumn()
822
+ col2.Name = "Name"
823
+ col2.SourceColumn = "Name"
824
+ col2.DataType = System.Enum.Parse(TOM.DataType, "String")
825
+ # col.SortByColumn = m.Tables[name].Columns[sortCol]
826
+ tbl.Columns.Add(col2)
827
+
828
+ self.model.DiscourageImplicitMeasures = True
829
+ self.model.Tables.Add(tbl)
830
+
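A sketch combining add_calculation_group with add_calculation_item defined earlier (the group name and DAX expression are placeholders):

    tom.add_calculation_group(name="Time Intelligence", precedence=1)
    tom.add_calculation_item(
        table_name="Time Intelligence",
        calculation_item_name="YTD",
        expression="CALCULATE(SELECTEDMEASURE(), DATESYTD('Date'[Date]))",
        ordinal=0,
    )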
831
+ def add_expression(
832
+ self, name: str, expression: str, description: Optional[str] = None
833
+ ):
834
+ """
835
+ Adds an expression to a semantic model.
836
+
837
+ Parameters
838
+ ----------
839
+ name : str
840
+ Name of the expression.
841
+ expression: str
842
+ The M expression of the expression.
843
+ description : str, default=None
844
+ A description of the expression.
845
+ """
846
+ import Microsoft.AnalysisServices.Tabular as TOM
847
+
848
+ exp = TOM.NamedExpression()
849
+ exp.Name = name
850
+ if description is not None:
851
+ exp.Description = description
852
+ exp.Kind = TOM.ExpressionKind.M
853
+ exp.Expression = expression
854
+
855
+ self.model.Expressions.Add(exp)
856
+
857
+ def add_translation(self, language: str):
858
+ """
859
+ Adds a translation language (culture) to a semantic model.
860
+
861
+ Parameters
862
+ ----------
863
+ language : str
864
+ The language code (e.g. 'it-IT' for Italian).
865
+ """
866
+ import Microsoft.AnalysisServices.Tabular as TOM
867
+
868
+ cul = TOM.Culture()
869
+ cul.Name = language
870
+
871
+ try:
872
+ self.model.Cultures.Add(cul)
873
+ except:
874
+ pass
875
+
876
+ def add_perspective(self, perspective_name: str):
877
+ """
878
+ Adds a perspective to a semantic model.
879
+
880
+ Parameters
881
+ ----------
882
+ perspective_name : str
883
+ Name of the perspective.
884
+ """
885
+ import Microsoft.AnalysisServices.Tabular as TOM
886
+
887
+ persp = TOM.Perspective()
888
+ persp.Name = perspective_name
889
+ self.model.Perspectives.Add(persp)
890
+
891
+ def add_m_partition(
892
+ self,
893
+ table_name: str,
894
+ partition_name: str,
895
+ expression: str,
896
+ mode: Optional[str] = None,
897
+ description: Optional[str] = None,
898
+ ):
899
+ """
900
+ Adds an M-partition to a table within a semantic model.
901
+
902
+ Parameters
903
+ ----------
904
+ table_name : str
905
+ Name of the table.
906
+ partition_name : str
907
+ Name of the partition.
908
+ expression : str
909
+ The M expression encapsulating the logic for the partition.
910
+ mode : str, default=None
911
+ The query mode for the partition.
912
+ Defaults to None which resolves to 'Import'.
913
+ description : str, default=None
914
+ A description for the partition.
915
+ """
916
+ import Microsoft.AnalysisServices.Tabular as TOM
917
+ import System
918
+
919
+ if mode is None:
+ mode = "Default"
+ mode = (
920
+ mode.title()
921
+ .replace("query", "Query")
922
+ .replace(" ", "")
923
+ .replace("lake", "Lake")
924
+ )
925
+
926
+ mp = TOM.MPartitionSource()
927
+ mp.Expression = expression
928
+ p = TOM.Partition()
929
+ p.Name = partition_name
930
+ p.Source = mp
931
+ if description is not None:
932
+ p.Description = description
933
+ if mode is None:
934
+ mode = "Default"
935
+ p.Mode = System.Enum.Parse(TOM.ModeType, mode)
936
+
937
+ self.model.Tables[table_name].Partitions.Add(p)
938
+
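A sketch of add_m_partition (the M expression, server, database and names are placeholders):

    tom.add_m_partition(
        table_name="Sales",
        partition_name="Sales 2024",
        expression='let Source = Sql.Database("server", "db") in Source',
        mode="Import",
    )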
939
+ def add_entity_partition(
940
+ self,
941
+ table_name: str,
942
+ entity_name: str,
943
+ expression: Optional[str] = None,
944
+ description: Optional[str] = None,
945
+ ):
946
+ """
947
+ Adds an entity partition to a table within a semantic model.
948
+
949
+ Parameters
950
+ ----------
951
+ table_name : str
952
+ Name of the table.
953
+ entity_name : str
954
+ Name of the lakehouse table.
955
+ expression : TOM Object, default=None
956
+ The expression used by the table.
957
+ Defaults to None which resolves to the 'DatabaseQuery' expression.
958
+ description : str, default=None
959
+ A description for the partition.
960
+ """
961
+ import Microsoft.AnalysisServices.Tabular as TOM
962
+
963
+ ep = TOM.EntityPartitionSource()
964
+ ep.Name = table_name
965
+ ep.EntityName = entity_name
966
+ if expression is None:
967
+ ep.ExpressionSource = self.model.Expressions["DatabaseQuery"]
968
+ else:
969
+ ep.ExpressionSource = expression
970
+ p = TOM.Partition()
971
+ p.Name = table_name
972
+ p.Source = ep
973
+ p.Mode = TOM.ModeType.DirectLake
974
+ if description is not None:
975
+ p.Description = description
976
+
977
+ self.model.Tables[table_name].Partitions.Add(p)
978
+
979
+ def set_alternate_of(
980
+ self,
981
+ table_name: str,
982
+ column_name: str,
983
+ summarization_type: str,
984
+ base_table: str,
985
+ base_column: Optional[str] = None,
986
+ ):
987
+ """
988
+ Sets the 'alternate of' property on a column.
989
+
990
+ Parameters
991
+ ----------
992
+ table_name : str
993
+ Name of the table.
994
+ column_name : str
995
+ Name of the column.
996
+ summarization_type : str
997
+ The summarization type for the column.
998
+ `Summarization valid values <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.alternateof.summarization?view=analysisservices-dotnet#microsoft-analysisservices-tabular-alternateof-summarization>`_
999
+ base_table : str
1000
+ Name of the base table for aggregation.
1001
+ base_column : str
1002
+ Name of the base column for aggregation
1003
+ """
1004
+ import Microsoft.AnalysisServices.Tabular as TOM
1005
+ import System
1006
+
1007
+ if base_column is not None and base_table is None:
1008
+ print(
1009
+ f"ERROR: If you specify the base table you must also specify the base column"
1010
+ )
1011
+
1012
+ summarization_type = (
1013
+ summarization_type.replace(" ", "")
1014
+ .capitalize()
1015
+ .replace("Groupby", "GroupBy")
1016
+ )
1017
+
1018
+ summarizationTypes = ["Sum", "GroupBy", "Count", "Min", "Max"]
1019
+ if summarization_type not in summarizationTypes:
1020
+ print(
1021
+ f"The 'summarization_type' parameter must be one of the following valuse: {summarizationTypes}."
1022
+ )
1023
+ return
1024
+
1025
+ ao = TOM.AlternateOf()
1026
+ ao.Summarization = System.Enum.Parse(TOM.SummarizationType, summarization_type)
1027
+ if base_column is not None:
1028
+ ao.BaseColumn = self.model.Tables[base_table].Columns[base_column]
1029
+ else:
1030
+ ao.BaseTable = self.model.Tables[base_table]
1031
+
1032
+ self.model.Tables[table_name].Columns[column_name].AlternateOf = ao
1033
+
1034
+ # Hide agg table and columns
1035
+ t = self.model.Tables[table_name]
1036
+ t.IsHidden = True
1037
+ for c in t.Columns:
1038
+ c.IsHidden = True
1039
+
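A sketch of set_alternate_of, mapping an aggregation-table column back to its detail column (placeholder names; note the method also hides the aggregation table and its columns):

    tom.set_alternate_of(
        table_name="Sales_Agg",
        column_name="SalesAmount",
        summarization_type="Sum",
        base_table="Sales",
        base_column="SalesAmount",
    )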
1040
+ def remove_alternate_of(self, table_name: str, column_name: str):
1041
+ """
1042
+ Removes the 'alternate of' property on a column.
1043
+
1044
+ Parameters
1045
+ ----------
1046
+ table_name : str
1047
+ Name of the table.
1048
+ column_name : str
1049
+ Name of the column.
1050
+
1051
+ Returns
1052
+ -------
1053
+
1054
+ """
1055
+
1056
+ self.model.Tables[table_name].Columns[column_name].AlternateOf = None
1057
+
1058
+ def get_annotations(
1059
+ self, object
1060
+ ) -> "Microsoft.AnalysisServices.Tabular.Annotation":
1061
+ """
1062
+ Shows all annotations for a given object within a semantic model.
1063
+
1064
+ Parameters
1065
+ ----------
1066
+ object : TOM Object
1067
+ An object (i.e. table/column/measure) within a semantic model.
1068
+
1069
+ Returns
1070
+ -------
1071
+ Microsoft.AnalysisServices.Tabular.Annotation
1072
+ TOM objects of all the annotations on a particular object within the semantic model.
1073
+ """
1074
+
1075
+ # df = pd.DataFrame(columns=['Name', 'Value'])
1076
+
1077
+ for a in object.Annotations:
1078
+ # new_data = {'Name': a.Name, 'Value': a.Value}
1079
+ yield a
1080
+ # df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
1081
+
1082
+ def set_annotation(self, object, name: str, value: str):
1083
+ """
1084
+ Sets an annotation on an object within the semantic model.
1085
+
1086
+ Parameters
1087
+ ----------
1088
+ object : TOM Object
1089
+ An object (i.e. table/column/measure) within a semantic model.
1090
+ name : str
1091
+ Name of the annotation.
1092
+ value : str
1093
+ Value of the annotation.
1094
+ """
1095
+ import Microsoft.AnalysisServices.Tabular as TOM
1096
+
1097
+ ann = TOM.Annotation()
1098
+ ann.Name = name
1099
+ ann.Value = value
1100
+
1101
+ try:
1102
+ object.Annotations[name].Value = value
1103
+ except:
1104
+ object.Annotations.Add(ann)
1105
+
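Annotations can be set and read back like this (the annotation name and value are placeholders; any TOM object such as a table works, and get_annotation_value is the helper defined just below):

    tbl = tom.model.Tables["Sales"]
    tom.set_annotation(object=tbl, name="Owner", value="Finance")
    print(tom.get_annotation_value(object=tbl, name="Owner"))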
1106
+ def get_annotation_value(self, object, name: str):
1107
+ """
1108
+ Obtains the annotation value for a given annotation on an object within the semantic model.
1109
+
1110
+ Parameters
1111
+ ----------
1112
+ object : TOM Object
1113
+ An object (i.e. table/column/measure) within a semantic model.
1114
+ name : str
1115
+ Name of the annotation.
1116
+
1117
+ Returns
1118
+ -------
1119
+ str
1120
+ The annotation value.
1121
+ """
1122
+
1123
+ return object.Annotations[name].Value
1124
+
1125
+ def remove_annotation(self, object, name: str):
1126
+ """
1127
+ Removes an annotation on an object within the semantic model.
1128
+
1129
+ Parameters
1130
+ ----------
1131
+ object : TOM Object
1132
+ An object (i.e. table/column/measure) within a semantic model.
1133
+ name : str
1134
+ Name of the annotation.
1135
+ """
1136
+
1137
+ object.Annotations.Remove(name)
1138
+
1139
+ def clear_annotations(self, object):
1140
+ """
1141
+ Removes all annotations on an object within the semantic model.
1142
+
1143
+ Parameters
1144
+ ----------
1145
+ object : TOM Object
1146
+ An object (i.e. table/column/measure) within a semantic model.
1147
+ """
1148
+
1149
+ object.Annotations.Clear()
1150
+
1151
+ def get_extended_properties(
1152
+ self, object
1153
+ ) -> "Microsoft.AnalysisServices.Tabular.ExtendedProperty":
1154
+ """
1155
+ Retrieves all extended properties on an object within the semantic model.
1156
+
1157
+ Parameters
1158
+ ----------
1159
+ object : TOM Object
1160
+ An object (i.e. table/column/measure) within a semantic model.
1161
+
1162
+ Returns
1163
+ -------
1164
+ Microsoft.AnalysisServices.Tabular.ExtendedPropertiesCollection
1165
+ TOM Objects of all the extended properties.
1166
+ """
1167
+
1168
+ for a in object.ExtendedProperties:
1169
+ yield a
1170
+
1171
+ def set_extended_property(
1172
+ self, object, extended_property_type: str, name: str, value: str
1173
+ ):
1174
+ """
1175
+ Sets an extended property on an object within the semantic model.
1176
+
1177
+ Parameters
1178
+ ----------
1179
+ object : TOM Object
1180
+ An object (i.e. table/column/measure) within a semantic model.
1181
+ extended_property_type : str
1182
+ The extended property type.
1183
+ `Extended property valid values <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.extendedpropertytype?view=analysisservices-dotnet>`_
1184
+ name : str
1185
+ Name of the extended property.
1186
+ value : str
1187
+ Value of the extended property.
1188
+ """
1189
+ import Microsoft.AnalysisServices.Tabular as TOM
1190
+
1191
+ extended_property_type = extended_property_type.title()
1192
+
1193
+ if extended_property_type == "Json":
1194
+ ep = TOM.JsonExtendedProperty()
1195
+ else:
1196
+ ep = TOM.StringExtendedProperty()
1197
+
1198
+ ep.Name = name
1199
+ ep.Value = value
1200
+
1201
+ try:
1202
+ object.ExtendedProperties[name].Value = value
1203
+ except:
1204
+ object.ExtendedProperties.Add(ep)
1205
+
1206
+ def get_extended_property_value(self, object, name: str):
1207
+ """
1208
+ Retrieves the value of an extended property for an object within the semantic model.
1209
+
1210
+ Parameters
1211
+ ----------
1212
+ object : TOM Object
1213
+ An object (i.e. table/column/measure) within a semantic model.
1214
+ name : str
1215
+ Name of the extended property.
1216
+
1217
+ Returns
1218
+ -------
1219
+ str
1220
+ The extended property value.
1221
+ """
1222
+
1223
+ return object.ExtendedProperties[name].Value
1224
+
1225
+ def remove_extended_property(self, object, name: str):
1226
+ """
1227
+ Removes an extended property on an object within the semantic model.
1228
+
1229
+ Parameters
1230
+ ----------
1231
+ object : TOM Object
1232
+ An object (i.e. table/column/measure) within a semantic model.
1233
+ name : str
1234
+ Name of the extended property.
1235
+ """
1236
+
1237
+ object.ExtendedProperties.Remove(name)
1238
+
1239
+ def clear_extended_properties(self, object):
1240
+ """
1241
+ Removes all extended properties on an object within the semantic model.
1242
+
1243
+ Parameters
1244
+ ----------
1245
+ object : TOM Object
1246
+ An object (i.e. table/column/measure) within a semantic model.
1247
+ """
1248
+
1249
+ object.ExtendedProperties.Clear()
1250
+
1251
+ def in_perspective(
1252
+ self,
1253
+ object: Union["TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy"],
1254
+ perspective_name: str,
1255
+ ):
1256
+ """
1257
+ Indicates whether an object is contained within a given perspective.
1258
+
1259
+ Parameters
1260
+ ----------
1261
+ object : TOM Object
1262
+ An object (i.e. table/column/measure) within a semantic model.
1263
+ perspective_name : str
1264
+ Name of the perspective.
1265
+
1266
+ Returns
1267
+ -------
1268
+ bool
1269
+ An indication as to whether the object is contained within the given perspective.
1270
+ """
1271
+ import Microsoft.AnalysisServices.Tabular as TOM
1272
+
1273
+ validObjects = [
1274
+ TOM.ObjectType.Table,
1275
+ TOM.ObjectType.Column,
1276
+ TOM.ObjectType.Measure,
1277
+ TOM.ObjectType.Hierarchy,
1278
+ ]
1279
+ objectType = object.ObjectType
1280
+
1281
+ if objectType not in validObjects:
1282
+ print(
1283
+ f"Only the following object types are valid for perspectives: {validObjects}."
1284
+ )
1285
+ return
1286
+
1287
+ object.Model.Perspectives[perspective_name]
1288
+
1289
+ try:
1290
+ if objectType == TOM.ObjectType.Table:
1291
+ object.Model.Perspectives[perspective_name].PerspectiveTables[
1292
+ object.Name
1293
+ ]
1294
+ elif objectType == TOM.ObjectType.Column:
1295
+ object.Model.Perspectives[perspective_name].PerspectiveTables[
1296
+ object.Parent.Name
1297
+ ].PerspectiveColumns[object.Name]
1298
+ elif objectType == TOM.ObjectType.Measure:
1299
+ object.Model.Perspectives[perspective_name].PerspectiveTables[
1300
+ object.Parent.Name
1301
+ ].PerspectiveMeasures[object.Name]
1302
+ elif objectType == TOM.ObjectType.Hierarchy:
1303
+ object.Model.Perspectives[perspective_name].PerspectiveTables[
1304
+ object.Parent.Name
1305
+ ].PerspectiveHierarchies[object.Name]
1306
+ return True
1307
+ except:
1308
+ return False
1309
+
1310
+ def add_to_perspective(
1311
+ self,
1312
+ object: Union["TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy"],
1313
+ perspective_name: str,
1314
+ ):
1315
+ """
1316
+ Adds an object to a perspective.
1317
+
1318
+ Parameters
1319
+ ----------
1320
+ object : TOM Object
1321
+ An object (i.e. table/column/measure) within a semantic model.
1322
+ perspective_name : str
1323
+ Name of the perspective.
1324
+ """
1325
+ import Microsoft.AnalysisServices.Tabular as TOM
1326
+
1327
+ validObjects = [
1328
+ TOM.ObjectType.Table,
1329
+ TOM.ObjectType.Column,
1330
+ TOM.ObjectType.Measure,
1331
+ TOM.ObjectType.Hierarchy,
1332
+ ]
1333
+ objectType = object.ObjectType
1334
+
1335
+ if objectType not in validObjects:
1336
+ print(
1337
+ f"Only the following object types are valid for perspectives: {validObjects}."
1338
+ )
1339
+ return
1340
+ try:
1341
+ object.Model.Perspectives[perspective_name]
1342
+ except:
1343
+ print(f"The '{perspective_name}' perspective does not exist.")
1344
+ return
1345
+
1346
+ # try:
1347
+ if objectType == TOM.ObjectType.Table:
1348
+ pt = TOM.PerspectiveTable()
1349
+ pt.Table = object
1350
+ object.Model.Perspectives[perspective_name].PerspectiveTables.Add(pt)
1351
+ elif objectType == TOM.ObjectType.Column:
1352
+ pc = TOM.PerspectiveColumn()
1353
+ pc.Column = object
1354
+ object.Model.Perspectives[perspective_name].PerspectiveTables[
1355
+ object.Parent.Name
1356
+ ].PerspectiveColumns.Add(pc)
1357
+ elif objectType == TOM.ObjectType.Measure:
1358
+ pm = TOM.PerspectiveMeasure()
1359
+ pm.Measure = object
1360
+ object.Model.Perspectives[perspective_name].PerspectiveTables[
1361
+ object.Parent.Name
1362
+ ].PerspectiveMeasures.Add(pm)
1363
+ elif objectType == TOM.ObjectType.Hierarchy:
1364
+ ph = TOM.PerspectiveHierarchy()
1365
+ ph.Hierarchy = object
1366
+ object.Model.Perspectives[perspective_name].PerspectiveTables[
1367
+ object.Parent.Name
1368
+ ].PerspectiveHierarchies.Add(ph)
1369
+ # except:
1370
+ # pass
1371
+
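A sketch of the perspective helpers (the perspective, table and measure names are placeholders; tom is a writable TOMWrapper as in the earlier sketches):

    tom.add_perspective(perspective_name="Reporting")
    meas = tom.model.Tables["Sales"].Measures["Total Sales Amount"]
    tom.add_to_perspective(object=meas, perspective_name="Reporting")
    print(tom.in_perspective(object=meas, perspective_name="Reporting"))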
1372
+ def remove_from_perspective(
1373
+ self,
1374
+ object: Union["TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy"],
1375
+ perspective_name: str,
1376
+ ):
1377
+ """
1378
+ Removes an object from a perspective.
1379
+
1380
+ Parameters
1381
+ ----------
1382
+ object : TOM Object
1383
+ An object (i.e. table/column/measure) within a semantic model.
1384
+ perspective_name : str
1385
+ Name of the perspective.
1386
+ """
1387
+ import Microsoft.AnalysisServices.Tabular as TOM
1388
+
1389
+ validObjects = [
1390
+ TOM.ObjectType.Table,
1391
+ TOM.ObjectType.Column,
1392
+ TOM.ObjectType.Measure,
1393
+ TOM.ObjectType.Hierarchy,
1394
+ ]
1395
+ objectType = object.ObjectType
1396
+
1397
+ if objectType not in validObjects:
1398
+ print(
1399
+ f"Only the following object types are valid for perspectives: {validObjects}."
1400
+ )
1401
+ return
1402
+ try:
1403
+ object.Model.Perspectives[perspective_name]
1404
+ except:
1405
+ print(f"The '{perspective_name}' perspective does not exist.")
1406
+ return
1407
+
1408
+ # try:
1409
+ if objectType == TOM.ObjectType.Table:
1410
+ pt = object.Model.Perspectives[perspective_name].PerspectiveTables[
1411
+ object.Name
1412
+ ]
1413
+ object.Model.Perspectives[perspective_name].PerspectiveTables.Remove(pt)
1414
+ elif objectType == TOM.ObjectType.Column:
1415
+ pc = (
1416
+ object.Model.Perspectives[perspective_name]
1417
+ .PerspectiveTables[object.Parent.Name]
1418
+ .PerspectiveColumns[object.Name]
1419
+ )
1420
+ object.Model.Perspectives[perspective_name].PerspectiveTables[
1421
+ object.Parent.Name
1422
+ ].PerspectiveColumns.Remove(pc)
1423
+ elif objectType == TOM.ObjectType.Measure:
1424
+ pm = (
1425
+ object.Model.Perspectives[perspective_name]
1426
+ .PerspectiveTables[object.Parent.Name]
1427
+ .PerspectiveMeasures[object.Name]
1428
+ )
1429
+ object.Model.Perspectives[perspective_name].PerspectiveTables[
1430
+ object.Parent.Name
1431
+ ].PerspectiveMeasures.Remove(pm)
1432
+ elif objectType == TOM.ObjectType.Hierarchy:
1433
+ ph = (
1434
+ object.Model.Perspectives[perspective_name]
1435
+ .PerspectiveTables[object.Parent.Name]
1436
+ .PerspectiveHierarchies[object.Name]
1437
+ )
1438
+ object.Model.Perspectives[perspective_name].PerspectiveTables[
1439
+ object.Parent.Name
1440
+ ].PerspectiveHierarchies.Remove(ph)
1441
+ # except:
1442
+ # pass
1443
+
1444
+ def set_translation(
1445
+ self,
1446
+ object: Union["TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy"],
1447
+ language: str,
1448
+ property: str,
1449
+ value: str,
1450
+ ):
1451
+ """
1452
+ Sets a translation value for an object's property.
1453
+
1454
+ Parameters
1455
+ ----------
1456
+ object : TOM Object
1457
+ An object (i.e. table/column/measure) within a semantic model.
1458
+ language : str
1459
+ The language code.
1460
+ property : str
1461
+ The property to set. Options: 'Name', 'Description', 'Display Folder'.
1462
+ value : str
1463
+ The translation value.
1464
+ """
1465
+ import Microsoft.AnalysisServices.Tabular as TOM
1466
+
1467
+ self.add_translation(language=language)
1468
+
1469
+ property = property.title()
1470
+
1471
+ validObjects = [
1472
+ TOM.ObjectType.Table,
1473
+ TOM.ObjectType.Column,
1474
+ TOM.ObjectType.Measure,
1475
+ TOM.ObjectType.Hierarchy,
1476
+ ] # , 'Level'
1477
+
1478
+ if object.ObjectType not in validObjects:
1479
+ print(f"Translations can only be set to {validObjects}.")
1480
+ return
1481
+
1482
+ mapping = {
1483
+ "Name": TOM.TranslatedProperty.Caption,
1484
+ "Description": TOM.TranslatedProperty.Description,
1485
+ "Display Folder": TOM.TranslatedProperty.DisplayFolder,
1486
+ }
1487
+
1488
+ prop = mapping.get(property)
1489
+
1490
+ try:
1491
+ object.Model.Cultures[language]
1492
+ except:
1493
+ print(
1494
+ f"The '{language}' translation language does not exist in the semantic model."
1495
+ )
1496
+ return
1497
+
1498
+ object.Model.Cultures[language].ObjectTranslations.SetTranslation(
1499
+ object, prop, value
1500
+ )
1501
+
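A sketch of the translation helpers (the language code and translated caption are placeholders; set_translation calls add_translation, so the culture is added automatically if it does not already exist):

    meas = tom.model.Tables["Sales"].Measures["Total Sales Amount"]
    tom.set_translation(object=meas, language="it-IT", property="Name", value="Importo Vendite")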
1502
+ def remove_translation(
1503
+ self,
1504
+ object: Union["TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy"],
1505
+ language: str,
1506
+ ):
1507
+ """
1508
+ Removes an object's translation value.
1509
+
1510
+ Parameters
1511
+ ----------
1512
+ object : TOM Object
1513
+ An object (i.e. table/column/measure) within a semantic model.
1514
+ language : str
1515
+ The language code.
1516
+ """
1517
+ import Microsoft.AnalysisServices.Tabular as TOM
1518
+
1519
+ o = object.Model.Cultures[language].ObjectTranslations[
1520
+ object, TOM.TranslatedProperty.Caption
1521
+ ]
1522
+ object.Model.Cultures[language].ObjectTranslations.Remove(o)
1523
+
1524
+ def remove_object(self, object):
1525
+ """
1526
+ Removes an object from a semantic model.
1527
+
1528
+ Parameters
1529
+ ----------
1530
+ object : TOM Object
1531
+ An object (i.e. table/column/measure) within a semantic model.
1532
+ """
1533
+ import Microsoft.AnalysisServices.Tabular as TOM
1534
+
1535
+ objType = object.ObjectType
1536
+
1537
+ # Have to remove translations and perspectives on the object before removing it.
1538
+ if objType in ["Table", "Column", "Measure", "Hierarchy", "Level"]:
1539
+ for lang in object.Model.Cultures:
1540
+ try:
1541
+ self.remove_translation(object=object, language=lang.Name)
1542
+ except:
1543
+ pass
1544
+ if objType in ["Table", "Column", "Measure", "Hierarchy"]:
1545
+ for persp in object.Model.Perspectives:
1546
+ try:
1547
+ self.remove_from_perspective(
1548
+ object=object, perspective_name=persp.Name
1549
+ )
1550
+ except:
1551
+ pass
1552
+
1553
+ if objType == TOM.ObjectType.Column:
1554
+ object.Parent.Columns.Remove(object.Name)
1555
+ elif objType == TOM.ObjectType.Measure:
1556
+ object.Parent.Measures.Remove(object.Name)
1557
+ elif objType == TOM.ObjectType.Hierarchy:
1558
+ object.Parent.Hierarchies.Remove(object.Name)
1559
+ elif objType == TOM.ObjectType.Level:
1560
+ object.Parent.Levels.Remove(object.Name)
1561
+ elif objType == TOM.ObjectType.Partition:
1562
+ object.Parent.Partitions.Remove(object.Name)
1563
+ elif objType == TOM.ObjectType.Expression:
1564
+ object.Parent.Expressions.Remove(object.Name)
1565
+ elif objType == TOM.ObjectType.DataSource:
1566
+ object.Parent.DataSources.Remove(object.Name)
1567
+ elif objType == TOM.ObjectType.Role:
1568
+ object.Parent.Roles.Remove(object.Name)
1569
+ elif objType == TOM.ObjectType.Relationship:
1570
+ object.Parent.Relationships.Remove(object.Name)
1571
+ elif objType == TOM.ObjectType.Culture:
1572
+ object.Parent.Cultures.Remove(object.Name)
1573
+ elif objType == TOM.ObjectType.Perspective:
1574
+ object.Parent.Perspectives.Remove(object.Name)
1575
+ elif objType == TOM.ObjectType.CalculationItem:
1576
+ object.Parent.CalculationItems.Remove(object.Name)
1577
+ elif objType == TOM.ObjectType.TablePermission:
1578
+ object.Parent.TablePermissions.Remove(object.Name)
1579
+
1580
+ def used_in_relationships(self, object: Union["TOM.Table", "TOM.Column"]):
1581
+ """
1582
+ Shows all relationships in which a table/column is used.
1583
+
1584
+ Parameters
1585
+ ----------
1586
+ object : TOM Object
1587
+ An object (i.e. table/column) within a semantic model.
1588
+
1589
+ Returns
1590
+ -------
1591
+ Microsoft.AnalysisServices.Tabular.RelationshipCollection
1592
+ All relationships in which the table/column is used.
1593
+ """
1594
+ import Microsoft.AnalysisServices.Tabular as TOM
1595
+
1596
+ objType = object.ObjectType
1597
+
1598
+ if objType == TOM.ObjectType.Table:
1599
+ for r in self.model.Relationships:
1600
+ if r.FromTable.Name == object.Name or r.ToTable.Name == object.Name:
1601
+ yield r # , 'Table'
1602
+ elif objType == TOM.ObjectType.Column:
1603
+ for r in self.model.Relationships:
1604
+ if (
1605
+ r.FromTable.Name == object.Parent.Name
1606
+ and r.FromColumn.Name == object.Name
1607
+ ) or (
1608
+ r.ToTable.Name == object.Parent.Name
1609
+ and r.ToColumn.Name == object.Name
1610
+ ):
1611
+ yield r # , 'Column'
1612
+
1613
+ def used_in_levels(self, column: "TOM.Column"):
1614
+ """
1615
+ Shows all levels in which a column is used.
1616
+
1617
+ Parameters
1618
+ ----------
1619
+ column : TOM Object
1620
+ A column object within a semantic model.
1621
+
1622
+ Returns
1623
+ -------
1624
+ Microsoft.AnalysisServices.Tabular.LevelCollection
1625
+ All levels in which the column is used.
1626
+ """
1627
+ import Microsoft.AnalysisServices.Tabular as TOM
1628
+
1629
+ objType = column.ObjectType
1630
+
1631
+ if objType == TOM.ObjectType.Column:
1632
+ for l in self.all_levels():
1633
+ if (
1634
+ l.Parent.Table.Name == column.Parent.Name
1635
+ and l.Column.Name == column.Name
1636
+ ):
1637
+ yield l
1638
+
1639
+ def used_in_hierarchies(self, column: "TOM.Column"):
1640
+ """
1641
+ Shows all hierarchies in which a column is used.
1642
+
1643
+ Parameters
1644
+ ----------
1645
+ column : TOM Object
1646
+ A column object within a semantic model.
1647
+
1648
+ Returns
1649
+ -------
1650
+ Microsoft.AnalysisServices.Tabular.HierarchyCollection
1651
+ All hierarchies in which the column is used.
1652
+ """
1653
+ import Microsoft.AnalysisServices.Tabular as TOM
1654
+
1655
+ objType = column.ObjectType
1656
+
1657
+ if objType == TOM.ObjectType.Column:
1658
+ for l in self.all_levels():
1659
+ if (
1660
+ l.Parent.Table.Name == column.Parent.Name
1661
+ and l.Column.Name == column.Name
1662
+ ):
1663
+ yield l.Parent
1664
+
1665
+ def used_in_sort_by(self, column: "TOM.Column"):
1666
+ """
1667
+ Shows all columns in which a column is used for sorting.
1668
+
1669
+ Parameters
1670
+ ----------
1671
+ column : TOM Object
1672
+ A column object within a semantic model.
1673
+
1674
+ Returns
1675
+ -------
1676
+ Microsoft.AnalysisServices.Tabular.ColumnCollection
1677
+ All columns in which the column is used for sorting.
1678
+ """
1679
+ import Microsoft.AnalysisServices.Tabular as TOM
1680
+
1681
+ objType = column.ObjectType
1682
+
1683
+ if objType == TOM.ObjectType.Column:
1684
+ for c in self.model.Tables[column.Parent.Name].Columns:
1685
+ if c.SortByColumn == column:
1686
+ yield c
1687
+
1688
+ def used_in_rls(
1689
+ self,
1690
+ object: Union["TOM.Table", "TOM.Column", "TOM.Measure"],
1691
+ dependencies: pd.DataFrame,
1692
+ ):
1693
+ """
1694
+ Identifies the filter expressions which reference a given object.
1695
+
1696
+ Parameters
1697
+ ----------
1698
+ object : TOM Object
1699
+ An object (i.e. table/column) within a semantic model.
1700
+ dependencies : pandas.DataFrame
1701
+ A pandas dataframe with the output of the 'get_model_calc_dependencies' function.
1702
+
1703
+ Returns
1704
+ -------
1705
+ Microsoft.AnalysisServices.Tabular.TableCollection, Microsoft.AnalysisServices.Tabular.ColumnCollection, Microsoft.AnalysisServices.Tabular.MeasureCollection
1706
+
1707
+ """
1708
+ import Microsoft.AnalysisServices.Tabular as TOM
1709
+
1710
+ objType = object.ObjectType
1711
+
1712
+ df_filt = dependencies[dependencies["Object Type"] == "Rows Allowed"]
1713
+
1714
+ if objType == TOM.ObjectType.Table:
1715
+ fil = df_filt[
1716
+ (df_filt["Referenced Object Type"] == "Table")
1717
+ & (df_filt["Referenced Table"] == object.Name)
1718
+ ]
1719
+ tbls = fil["Table Name"].unique().tolist()
1720
+ for t in self.model.Tables:
1721
+ if t.Name in tbls:
1722
+ yield t
1723
+ elif objType == TOM.ObjectType.Column:
1724
+ fil = df_filt[
1725
+ (df_filt["Referenced Object Type"] == "Column")
1726
+ & (df_filt["Referenced Table"] == object.Parent.Name)
1727
+ & (df_filt["Referenced Object"] == object.Name)
1728
+ ]
1729
+ cols = fil["Full Object Name"].unique().tolist()
1730
+ for c in self.all_columns():
1731
+ if format_dax_object_name(c.Parent.Name, c.Name) in cols:
1732
+ yield c
1733
+ elif objType == TOM.ObjectType.Measure:
1734
+ fil = df_filt[
1735
+ (df_filt["Referenced Object Type"] == "Measure")
1736
+ & (df_filt["Referenced Table"] == object.Parent.Name)
1737
+ & (df_filt["Referenced Object"] == object.Name)
1738
+ ]
1739
+ meas = fil["Object Name"].unique().tolist()
1740
+ for m in self.all_measures():
1741
+ if m.Name in meas:
1742
+ yield m
1743
+
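A hedged illustration of how `used_in_rls` pairs with the dependency dataframe it expects; the `get_model_calc_dependencies` import path and call signature are assumptions, and `tom`, the dataset and the workspace names are placeholders:

from sempy_labs._model_dependencies import get_model_calc_dependencies  # assumed location

dep = get_model_calc_dependencies(dataset="MyModel", workspace="MyWorkspace")  # assumed signature
customer_table = tom.model.Tables["DimCustomer"]
for obj in tom.used_in_rls(object=customer_table, dependencies=dep):
    print(obj.Name)  # tables whose row-level security filter expressions reference 'DimCustomer'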
1744
+ def used_in_data_coverage_definition(
1745
+ self,
1746
+ object: Union["TOM.Table", "TOM.Column", "TOM.Measure"],
1747
+ dependencies: pd.DataFrame,
1748
+ ):
1749
+ """
1750
+ Identifies the data coverage definitions which reference a given object.
1751
+
1752
+ Parameters
1753
+ ----------
1754
+ object : TOM Object
1755
+ An object (i.e. table/column) within a semantic model.
1756
+ dependencies : pandas.DataFrame
1757
+ A pandas dataframe with the output of the 'get_model_calc_dependencies' function.
1758
+
1759
+ Returns
1760
+ -------
1761
+ Microsoft.AnalysisServices.Tabular.TableCollection, Microsoft.AnalysisServices.Tabular.ColumnCollection, Microsoft.AnalysisServices.Tabular.MeasureCollection
1762
+ """
1763
+ import Microsoft.AnalysisServices.Tabular as TOM
1764
+
1765
+ objType = object.ObjectType
1766
+
1767
+ df_filt = dependencies[
1768
+ dependencies["Object Type"] == "Data Coverage Definition"
1769
+ ]
1770
+
1771
+ if objType == TOM.ObjectType.Table:
1772
+ fil = df_filt[
1773
+ (df_filt["Referenced Object Type"] == "Table")
1774
+ & (df_filt["Referenced Table"] == object.Name)
1775
+ ]
1776
+ tbls = fil["Table Name"].unique().tolist()
1777
+ for t in self.model.Tables:
1778
+ if t.Name in tbls:
1779
+ yield t
1780
+ elif objType == TOM.ObjectType.Column:
1781
+ fil = df_filt[
1782
+ (df_filt["Referenced Object Type"] == "Column")
1783
+ & (df_filt["Referenced Table"] == object.Parent.Name)
1784
+ & (df_filt["Referenced Object"] == object.Name)
1785
+ ]
1786
+ cols = fil["Full Object Name"].unique().tolist()
1787
+ for c in self.all_columns():
1788
+ if format_dax_object_name(c.Parent.Name, c.Name) in cols:
1789
+ yield c
1790
+ elif objType == TOM.ObjectType.Measure:
1791
+ fil = df_filt[
1792
+ (df_filt["Referenced Object Type"] == "Measure")
1793
+ & (df_filt["Referenced Table"] == object.Parent.Name)
1794
+ & (df_filt["Referenced Object"] == object.Name)
1795
+ ]
1796
+ meas = fil["Object Name"].unique().tolist()
1797
+ for m in self.all_measures():
1798
+ if m.Name in meas:
1799
+ yield m
1800
+
1801
+ def used_in_calc_item(
1802
+ self,
1803
+ object: Union["TOM.Table", "TOM.Column", "TOM.Measure"],
1804
+ dependencies: pd.DataFrame,
1805
+ ):
1806
+ """
1807
+ Identifies the calculation items which reference a given object.
1808
+
1809
+ Parameters
1810
+ ----------
1811
+ object : TOM Object
1812
+ An object (i.e. table/column) within a semantic model.
1813
+ dependencies : pandas.DataFrame
1814
+ A pandas dataframe with the output of the 'get_model_calc_dependencies' function.
1815
+
1816
+ Returns
1817
+ -------
1818
+ Microsoft.AnalysisServices.Tabular.TableCollection, Microsoft.AnalysisServices.Tabular.ColumnCollection, Microsoft.AnalysisServices.Tabular.MeasureCollection
1819
+ """
1820
+ import Microsoft.AnalysisServices.Tabular as TOM
1821
+
1822
+ objType = object.ObjectType
1823
+
1824
+ df_filt = dependencies[dependencies["Object Type"] == "Calculation Item"]
1825
+
1826
+ if objType == TOM.ObjectType.Table:
1827
+ fil = df_filt[
1828
+ (df_filt["Referenced Object Type"] == "Table")
1829
+ & (df_filt["Referenced Table"] == object.Name)
1830
+ ]
1831
+ tbls = fil["Table Name"].unique().tolist()
1832
+ for t in self.model.Tables:
1833
+ if t.Name in tbls:
1834
+ yield t
1835
+ elif objType == TOM.ObjectType.Column:
1836
+ fil = df_filt[
1837
+ (df_filt["Referenced Object Type"] == "Column")
1838
+ & (df_filt["Referenced Table"] == object.Parent.Name)
1839
+ & (df_filt["Referenced Object"] == object.Name)
1840
+ ]
1841
+ cols = fil["Full Object Name"].unique().tolist()
1842
+ for c in self.all_columns():
1843
+ if format_dax_object_name(c.Parent.Name, c.Name) in cols:
1844
+ yield c
1845
+ elif objType == TOM.ObjectType.Measure:
1846
+ fil = df_filt[
1847
+ (df_filt["Referenced Object Type"] == "Measure")
1848
+ & (df_filt["Referenced Table"] == object.Parent.Name)
1849
+ & (df_filt["Referenced Object"] == object.Name)
1850
+ ]
1851
+ meas = fil["Object Name"].unique().tolist()
1852
+ for m in self.all_measures():
1853
+ if m.Name in meas:
1854
+ yield m
1855
+
1856
+ def hybrid_tables(self):
1857
+ """
1858
+ Outputs the hybrid tables within a semantic model.
1859
+
1860
+ Parameters
1861
+ ----------
1862
+
1863
+ Returns
1864
+ -------
1865
+ Microsoft.AnalysisServices.Tabular.TableCollection
1866
+ All hybrid tables within a semantic model.
1867
+ """
1868
+ import Microsoft.AnalysisServices.Tabular as TOM
1869
+
1870
+ for t in self.model.Tables:
1871
+ if any(p.Mode == TOM.ModeType.Import for p in t.Partitions):
1872
+ if any(p.Mode == TOM.ModeType.DirectQuery for p in t.Partitions):
1873
+ yield t
1874
+
1875
+ def date_tables(self):
1876
+ """
1877
+ Outputs the tables which are marked as date tables within a semantic model.
1878
+
1879
+ Parameters
1880
+ ----------
1881
+
1882
+ Returns
1883
+ -------
1884
+ Microsoft.AnalysisServices.Tabular.TableCollection
1885
+ All tables marked as date tables within a semantic model.
1886
+ """
1887
+ import Microsoft.AnalysisServices.Tabular as TOM
1888
+
1889
+ for t in self.model.Tables:
1890
+ if t.DataCategory == "Time":
1891
+ if any(
1892
+ c.IsKey and c.DataType == TOM.DataType.DateTime for c in t.Columns
1893
+ ):
1894
+ yield t
1895
+
1896
+ def is_hybrid_table(self, table_name: str):
1897
+ """
1898
+ Identifies if a table is a hybrid table.
1899
+
1900
+ Parameters
1901
+ ----------
1902
+ table_name : str
1903
+ Name of the table.
1904
+
1905
+ Returns
1906
+ -------
1907
+ bool
1908
+ Indicates if the table is a hybrid table.
1909
+ """
1910
+ import Microsoft.AnalysisServices.Tabular as TOM
1911
+
1912
+ isHybridTable = False
1913
+
1914
+ if any(
1915
+ p.Mode == TOM.ModeType.Import
1916
+ for p in self.model.Tables[table_name].Partitions
1917
+ ):
1918
+ if any(
1919
+ p.Mode == TOM.ModeType.DirectQuery
1920
+ for p in self.model.Tables[table_name].Partitions
1921
+ ):
1922
+ isHybridTable = True
1923
+
1924
+ return isHybridTable
1925
+
1926
+ def is_date_table(self, table_name: str):
1927
+ """
1928
+ Identifies if a table is marked as a date table.
1929
+
1930
+ Parameters
1931
+ ----------
1932
+ table_name : str
1933
+ Name of the table.
1934
+
1935
+ Returns
1936
+ -------
1937
+ bool
1938
+ Indicates if the table is marked as a date table.
1939
+ """
1940
+ import Microsoft.AnalysisServices.Tabular as TOM
1941
+
1942
+ isDateTable = False
1943
+ t = self.model.Tables[table_name]
1944
+
1945
+ if t.DataCategory == "Time":
1946
+ if any(c.IsKey and c.DataType == TOM.DataType.DateTime for c in t.Columns):
1947
+ isDateTable = True
1948
+
1949
+ return isDateTable
1950
+
1951
+ def mark_as_date_table(self, table_name: str, column_name: str):
1952
+ """
1953
+ Marks a table as a date table.
1954
+
1955
+ Parameters
1956
+ ----------
1957
+ table_name : str
1958
+ Name of the table.
1959
+ column_name : str
1960
+ Name of the date column in the table.
1961
+ """
1962
+ import Microsoft.AnalysisServices.Tabular as TOM
1963
+
1964
+ t = self.model.Tables[table_name]
1965
+ c = t.Columns[column_name]
1966
+ if c.DataType != TOM.DataType.DateTime:
1967
+ print(
1968
+ f"{icons.red_dot} The column specified in the 'column_name' parameter in this function must be of DateTime data type."
1969
+ )
1970
+ return
1971
+
1972
+ daxQuery = f"""
1973
+ define measure '{table_name}'[test] =
1974
+ var mn = MIN('{table_name}'[{column_name}])
1975
+ var ma = MAX('{table_name}'[{column_name}])
1976
+ var x = COUNTROWS(DISTINCT('{table_name}'[{column_name}]))
1977
+ var y = DATEDIFF(mn, ma, DAY) + 1
1978
+ return if(y = x, 1,0)
1979
+
1980
+ EVALUATE
1981
+ SUMMARIZECOLUMNS(
1982
+ "1",[test]
1983
+ )
1984
+ """
1985
+ df = fabric.evaluate_dax(
1986
+ dataset=self.dataset, workspace=self.workspace, dax_string=daxQuery
1987
+ )
1988
+ value = df["1"].iloc[0]
1989
+ if value != "1":
1990
+ print(
1991
+ f"{icons.red_dot} The '{column_name}' within the '{table_name}' table does not contain contiguous date values."
1992
+ )
1993
+ return
1994
+
1995
+ # Mark as a date table
1996
+ t.DataCategory = "Time"
1997
+ c.IsKey = True
1998
+ print(
1999
+ f"{icons.green_dot} The '{table_name}' table has been marked as a date table using the '{column_name}' column as its primary date key."
2000
+ )
2001
+
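For clarity, the DAX validation above boils down to a simple contiguity rule: the count of distinct dates must equal the number of days between the minimum and maximum date, inclusive. A self-contained pandas sketch of the same check:

import pandas as pd

dates = pd.Series(pd.to_datetime(["2024-01-01", "2024-01-02", "2024-01-03"]))
span_days = (dates.max() - dates.min()).days + 1
is_contiguous = dates.nunique() == span_days
print(is_contiguous)  # True -> the column is eligible to be a primary date key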
2002
+ def has_aggs(self):
2003
+ """
2004
+ Identifies if a semantic model has any aggregations.
2005
+
2006
+ Parameters
2007
+ ----------
2008
+
2009
+ Returns
2010
+ -------
2011
+ bool
2012
+ Indicates if the semantic model has any aggregations.
2013
+ """
2014
+
2015
+ hasAggs = False
2016
+
2017
+ for c in self.all_columns():
2018
+ if c.AlternateOf is not None:
2019
+ hasAggs = True
2020
+
2021
+ return hasAggs
2022
+
2023
+ def is_agg_table(self, table_name: str):
2024
+ """
2025
+ Identifies if a table has aggregations.
2026
+
2027
+ Parameters
2028
+ ----------
2029
+ table_name : str
2030
+ Name of the table.
2031
+
2032
+ Returns
2033
+ -------
2034
+ bool
2035
+ Indicates if the table has any aggregations.
2036
+ """
2037
+
2038
+ t = self.model.Tables[table_name]
2039
+
2040
+ return any(c.AlternateOf is not None for c in t.Columns)
2041
+
2042
+ def has_hybrid_table(self):
2043
+ """
2044
+ Identifies if a semantic model has a hybrid table.
2045
+
2046
+ Parameters
2047
+ ----------
2048
+
2049
+ Returns
2050
+ -------
2051
+ bool
2052
+ Indicates if the semantic model has a hybrid table.
2053
+ """
2054
+
2055
+ hasHybridTable = False
2056
+
2057
+ for t in self.model.Tables:
2058
+ if self.is_hybrid_table(table_name=t.Name):
2059
+ hasHybridTable = True
2060
+
2061
+ return hasHybridTable
2062
+
2063
+ def has_date_table(self):
2064
+ """
2065
+ Identifies if a semantic model has a table marked as a date table.
2066
+
2067
+ Parameters
2068
+ ----------
2069
+
2070
+ Returns
2071
+ -------
2072
+ bool
2073
+ Indicates if the semantic model has a table marked as a date table.
2074
+ """
2075
+
2076
+ hasDateTable = False
2077
+
2078
+ for t in self.model.Tables:
2079
+ if self.is_date_table(table_name=t.Name):
2080
+ hasDateTable = True
2081
+
2082
+ return hasDateTable
2083
+
2084
+ def is_direct_lake(self):
2085
+ """
2086
+ Identifies if a semantic model is in Direct Lake mode.
2087
+
2088
+ Parameters
2089
+ ----------
2090
+
2091
+ Returns
2092
+ -------
2093
+ bool
2094
+ Indicates if the semantic model is in Direct Lake mode.
2095
+ """
2096
+ import Microsoft.AnalysisServices.Tabular as TOM
+
2097
+ return any(
2098
+ p.Mode == TOM.ModeType.DirectLake
2099
+ for t in self.model.Tables
2100
+ for p in t.Partitions
2101
+ )
2102
+
2103
+ def is_field_parameter(self, table_name: str):
2104
+ """
2105
+ Identifies if a table is a field parameter.
2106
+
2107
+ Parameters
2108
+ ----------
2109
+ table_name : str
2110
+ Name of the table.
2111
+
2112
+ Returns
2113
+ -------
2114
+ bool
2115
+ Indicates if the table is a field parameter.
2116
+ """
2117
+ import Microsoft.AnalysisServices.Tabular as TOM
2118
+
2119
+ t = self.model.Tables[table_name]
2120
+
2121
+ return (
2122
+ any(
2123
+ p.SourceType == TOM.PartitionSourceType.Calculated
2124
+ and "NAMEOF(" in p.Source.Expression
2125
+ for p in t.Partitions
2126
+ )
2127
+ and all(
2128
+ "[Value" in c.SourceColumn
2129
+ for c in t.Columns
2130
+ if c.Type != TOM.ColumnType.RowNumber
2131
+ )
2132
+ and t.Columns.Count == 4
2133
+ )
2134
+
2135
+ def is_auto_date_table(self, table_name: str):
2136
+ """
2137
+ Identifies if a table is an auto-date table.
2138
+
2139
+ Parameters
2140
+ ----------
2141
+ table_name : str
2142
+ Name of the table.
2143
+
2144
+ Returns
2145
+ -------
2146
+ bool
2147
+ Indicates if the table is an auto-date table.
2148
+ """
2149
+ import Microsoft.AnalysisServices.Tabular as TOM
2150
+
2151
+ isAutoDate = False
2152
+
2153
+ t = self.model.Tables[table_name]
2154
+
2155
+ if t.Name.startswith("LocalDateTable_") or t.Name.startswith(
2156
+ "DateTableTemplate_"
2157
+ ):
2158
+ if any(
2159
+ p.SourceType == TOM.PartitionSourceType.Calculated for p in t.Partitions
2160
+ ):
2161
+ isAutoDate = True
2162
+
2163
+ return isAutoDate
2164
+
2165
+ def set_kpi(
2166
+ self,
2167
+ measure_name: str,
2168
+ target: Union[int, float, str],
2169
+ lower_bound: float,
2170
+ upper_bound: float,
2171
+ lower_mid_bound: Optional[float] = None,
2172
+ upper_mid_bound: Optional[float] = None,
2173
+ status_type: Optional[str] = None,
2174
+ status_graphic: Optional[str] = None,
2175
+ ):
2176
+ """
2177
+ Sets the properties to add/update a KPI for a measure.
2178
+
2179
+ Parameters
2180
+ ----------
2181
+ measure_name : str
2182
+ Name of the measure.
2183
+ target : str, int, float
2184
+ The target for the KPI. This can either be a number or the name of a different measure in the semantic model.
2185
+ lower_bound: float
2186
+ The lower bound for the KPI.
2187
+ upper_bound : float
2188
+ The upper bound for the KPI.
2189
+ lower_mid_bound : float, default=None
2190
+ The lower-mid bound for the KPI. Set this if status_type is 'Centered' or 'CenteredReversed'.
2191
+ upper_mid_bound : float, default=None
2192
+ The upper-mid bound for the KPI. Set this if status_type is 'Centered' or 'CenteredReversed'.
2193
+ status_type : str, default=None
2194
+ The status type of the KPI. Options: 'Linear', 'LinearReversed', 'Centered', 'CenteredReversed'.
2195
+ Defaults to None which resolves to 'Linear'.
2196
+ status_graphic : str, default=None
2197
+ The status graphic for the KPI.
2198
+ Defaults to 'Three Circles Colored'.
2199
+ """
2200
+ import Microsoft.AnalysisServices.Tabular as TOM
2201
+
2202
+ # https://github.com/m-kovalsky/Tabular/blob/master/KPI%20Graphics.md
2203
+
2204
+ if measure_name == target:
2205
+ print(
2206
+ f"The 'target' parameter cannot be the same measure as the 'measure_name' parameter."
2207
+ )
2208
+ return
2209
+
2210
+ if status_graphic is None:
2211
+ status_graphic = "Three Circles Colored"
2212
+
2213
+ statusType = ["Linear", "LinearReversed", "Centered", "CenteredReversed"]
2214
+ if status_type is None:
2215
+ status_type = "Linear"
2216
+
2217
+ status_type = status_type.title().replace(" ", "")
2218
+
2219
+ if status_type not in statusType:
2220
+ print(
2221
+ f"'{status_type}' is an invalid status_type. Please choose from these options: {statusType}."
2222
+ )
2223
+ return
2224
+
2225
+ if status_type in ["Linear", "LinearReversed"]:
2226
+ if upper_mid_bound is not None or lower_mid_bound is not None:
2227
+ print(
2228
+ f"The 'upper_mid_bound' and 'lower_mid_bound' parameters are not used in the 'Linear' and 'LinearReversed' status types. Make sure these parameters are set to None."
2229
+ )
2230
+ return
2231
+ elif upper_bound <= lower_bound:
2232
+ print(f"The upper_bound must be greater than the lower_bound.")
2233
+ return
2234
+
2235
+ if status_type in ["Centered", "CenteredReversed"]:
2236
+ if upper_mid_bound is None or lower_mid_bound is None:
2237
+ print(
2238
+ f"The 'upper_mid_bound' and 'lower_mid_bound' parameters are necessary in the 'Centered' and 'CenteredReversed' status types."
2239
+ )
2240
+ return
2241
+ elif upper_bound <= upper_mid_bound:
2242
+ print(f"The upper_bound must be greater than the upper_mid_bound.")
+ return
2243
+ elif upper_mid_bound <= lower_mid_bound:
2244
+ print(f"The upper_mid_bound must be greater than the lower_mid_bound.")
+ return
2245
+ elif lower_mid_bound <= lower_bound:
2246
+ print(f"The lower_mid_bound must be greater than the lower_bound.")
+ return
2247
+
2248
+ try:
2249
+ table_name = next(
2250
+ m.Parent.Name for m in self.all_measures() if m.Name == measure_name
2251
+ )
2252
+ except:
2253
+ print(
2254
+ f"The '{measure_name}' measure does not exist in the '{self.dataset}' semantic model within the '{self.workspace}'."
2255
+ )
2256
+ return
2257
+
2258
+ graphics = [
2259
+ "Cylinder",
2260
+ "Five Bars Colored",
2261
+ "Five Boxes Colored",
2262
+ "Gauge - Ascending",
2263
+ "Gauge - Descending",
2264
+ "Road Signs",
2265
+ "Shapes",
2266
+ "Standard Arrow",
2267
+ "Three Circles Colored",
2268
+ "Three Flags Colored",
2269
+ "Three Stars Colored",
2270
+ "Three Symbols Uncircled Colored",
2271
+ "Traffic Light",
2272
+ "Traffic Light - Single",
2273
+ "Variance Arrow",
2274
+ "Status Arrow - Ascending",
2275
+ "Status Arrow - Descending",
2276
+ ]
2277
+
2278
+ if status_graphic not in graphics:
2279
+ print(
2280
+ f"The '{status_graphic}' status graphic is not valid. Please choose from these options: {graphics}."
2281
+ )
2282
+ return
2283
+
2284
+ measure_target = True
2285
+
2286
+ try:
2287
+ float(target)
2288
+ tgt = str(target)
2289
+ measure_target = False
2290
+ except:
2291
+ try:
2292
+ tgt = next(
2293
+ format_dax_object_name(m.Parent.Name, m.Name)
2294
+ for m in self.all_measures()
2295
+ if m.Name == target
2296
+ )
2297
+ except:
2298
+ print(
2299
+ f"The '{target}' measure does not exist in the '{self.dataset}' semantic model within the '{self.workspace}'."
2300
+ )
2301
+ return
+
2302
+ if measure_target:
2303
+ expr = f"var x = [{measure_name}]/[{target}]\nreturn"
2304
+ else:
2305
+ expr = f"var x = [{measure_name}\nreturn"
2306
+
2307
+ if status_type == "Linear":
2308
+ expr = f"{expr}\nif(isblank(x),blank(),\n\tif(x<{lower_bound},-1,\n\t\tif(x<{upper_bound},0,1)))"
2309
+ elif status_type == "LinearReversed":
2310
+ expr = f"{expr}\nif(isblank(x),blank(),\nif(x<{lower_bound},1,\n\t\tif(x<{upper_bound},0,-1)))"
2311
+ elif status_type == "Centered":
2312
+ expr = f"{expr}\nif(isblank(x),blank(),\n\tif(x<{lower_mid_bound},\n\t\tif(x<{lower_bound},-1,0),\n\t\t\tif(x<{upper_mid_bound},1,\n\t\t\t\tif(x<{upper_bound}0,-1))))"
2313
+ elif status_type == "CenteredReversed":
2314
+ expr = f"{expr}\nif(isblank(x),blank(),\n\tif(x<{lower_mid_bound},\n\t\tif(x<{lower_bound},1,0),\n\t\t\tif(x<{upper_mid_bound},-1,\n\t\t\t\tif(x<{upper_bound}0,1))))"
2315
+
2316
+ kpi = TOM.KPI()
2317
+ kpi.TargetExpression = tgt
2318
+ kpi.StatusGraphic = status_graphic
2319
+ kpi.StatusExpression = expr
2320
+
2321
+ ms = self.model.Tables[table_name].Measures[measure_name]
2322
+ try:
2323
+ ms.KPI.TargetExpression = tgt
2324
+ ms.KPI.StatusGraphic = status_graphic
2325
+ ms.KPI.StatusExpression = expr
2326
+ except:
2327
+ ms.KPI = kpi
2328
+
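A hedged usage sketch for the KPI helper above; `tom` is assumed to be an instance of this wrapper class and the measure names are placeholders. With a measure target, the 'Linear' branch produces a status expression of the shape shown in the trailing comment:

tom.set_kpi(
    measure_name="Sales",        # placeholder measure
    target="Sales Target",       # placeholder target measure
    lower_bound=0.8,
    upper_bound=1.1,
    status_type="Linear",
    status_graphic="Three Circles Colored",
)
# Resulting status expression (shape only):
#   var x = [Sales]/[Sales Target]
#   return
#   if(isblank(x),blank(), if(x<0.8,-1, if(x<1.1,0,1)))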
2329
+ def set_aggregations(self, table_name: str, agg_table_name: str):
2330
+ """
2331
+ Sets the aggregations (alternate of) for all the columns in an aggregation table based on a base table.
2332
+
2333
+ Parameters
2334
+ ----------
2335
+ table_name : str
2336
+ Name of the base table.
2337
+ agg_table_name : str
2338
+ Name of the aggregation table.
2339
+
2340
+ Returns
2341
+ -------
2342
+
2343
+ import Microsoft.AnalysisServices.Tabular as TOM
+
2344
+
2345
+ for c in self.model.Tables[agg_table_name].Columns:
2346
+
2347
+ dataType = c.DataType
2348
+
2349
+ if dataType in [
2350
+ TOM.DataType.String,
2351
+ TOM.DataType.Boolean,
2352
+ TOM.DataType.DateTime,
2353
+ ]:
2354
+ sumType = "GroupBy"
2355
+ else:
2356
+ sumType = "Sum"
2357
+
2358
+ self.set_alternate_of(
2359
+ table_name=agg_table_name,
2360
+ column_name=c.Name,
2361
+ base_table=table_name,
2362
+ base_column=c.Name,
2363
+ summarization_type=sumType,
2364
+ )
2365
+
2366
+ def set_is_available_in_mdx(
2367
+ self, table_name: str, column_name: str, value: Optional[bool] = False
2368
+ ):
2369
+ """
2370
+ Sets the IsAvailableInMdx property on a column.
2371
+
2372
+ Parameters
2373
+ ----------
2374
+ table_name : str
2375
+ Name of the table.
2376
+ column_name : str
2377
+ Name of the column.
2378
+ value : bool, default=False
2379
+ The IsAvailableInMdx property value.
2380
+ """
2381
+
2382
+ self.model.Tables[table_name].Columns[column_name].IsAvailableInMdx = value
2383
+
2384
+ def set_summarize_by(
2385
+ self, table_name: str, column_name: str, value: Optional[str] = None
2386
+ ):
2387
+ """
2388
+ Sets the SummarizeBy property on a column.
2389
+
2390
+ Parameters
2391
+ ----------
2392
+ table_name : str
2393
+ Name of the table.
2394
+ column_name : str
2395
+ Name of the column.
2396
+ value : str, default=None
2397
+ The SummarizeBy property value.
2398
+ Defaults to none which resolves to 'Default'.
2399
+ `Aggregate valid values <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.aggregatefunction?view=analysisservices-dotnet>`_
2400
+ """
2401
+ import Microsoft.AnalysisServices.Tabular as TOM
+ import System
2402
+
2403
+ values = [
2404
+ "Default",
2405
+ "None",
2406
+ "Sum",
2407
+ "Min",
2408
+ "Max",
2409
+ "Count",
2410
+ "Average",
2411
+ "DistinctCount",
2412
+ ]
2413
+ # https://learn.microsoft.com/en-us/dotnet/api/microsoft.analysisservices.tabular.column.summarizeby?view=analysisservices-dotnet#microsoft-analysisservices-tabular-column-summarizeby
2414
+
2415
+ if value is None:
2416
+ value = "Default"
2417
+ value = (
2418
+ value.capitalize()
2419
+ .replace("Distinctcount", "DistinctCount")
2420
+ .replace("Avg", "Average")
2421
+ )
2422
+
2423
+ if value not in values:
2424
+ print(
2425
+ f"'{value}' is not a valid value for the SummarizeBy property. These are the valid values: {values}."
2426
+ )
2427
+ return
2428
+
2429
+ self.model.Tables[table_name].Columns[column_name].SummarizeBy = (
2430
+ System.Enum.Parse(TOM.AggregateFunction, value)
2431
+ )
2432
+
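A short hedged example for the property setter above; setting SummarizeBy to 'None' is the usual way to stop a numeric key column from being implicitly summed. The names are placeholders and `tom` is assumed to be an instance of this wrapper class:

tom.set_summarize_by(table_name="FactSales", column_name="OrderKey", value="None")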
2433
+ def set_direct_lake_behavior(self, direct_lake_behavior: str):
2434
+ """
2435
+ Sets the Direct Lake Behavior property for a semantic model.
2436
+
2437
+ Parameters
2438
+ ----------
2439
+ direct_lake_behavior : str
2440
+ The DirectLakeBehavior property value.
2441
+ `DirectLakeBehavior valid values <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.directlakebehavior?view=analysisservices-dotnet>`_
2442
+ """
2443
+ import Microsoft.AnalysisServices.Tabular as TOM
+ import System
2444
+
2445
+ direct_lake_behavior = direct_lake_behavior.capitalize()
2446
+ if direct_lake_behavior.startswith("Auto"):
2447
+ direct_lake_behavior = "Automatic"
2448
+ elif direct_lake_behavior.startswith("Directl") or direct_lake_behavior == "Dl":
2449
+ direct_lake_behavior = "DirectLakeOnly"
2450
+ elif direct_lake_behavior.startswith("Directq") or direct_lake_behavior == "Dq":
2451
+ direct_lake_behavior = "DirectQueryOnly"
2452
+
2453
+ dlValues = ["Automatic", "DirectLakeOnly", "DirectQueryOnly"]
2454
+
2455
+ if direct_lake_behavior not in dlValues:
2456
+ print(
2457
+ f"The 'direct_lake_behavior' parameter must be one of these values: {dlValues}."
2458
+ )
2459
+ return
2460
+
2461
+ self.model.DirectLakeBehavior = System.Enum.Parse(
2462
+ TOM.DirectLakeBehavior, direct_lake_behavior
2463
+ )
2464
+
2465
+ def add_table(
2466
+ self,
2467
+ name: str,
2468
+ description: Optional[str] = None,
2469
+ data_category: Optional[str] = None,
2470
+ hidden: Optional[bool] = False,
2471
+ ):
2472
+ """
2473
+ Adds a table to the semantic model.
2474
+
2475
+ Parameters
2476
+ ----------
2477
+ name : str
2478
+ Name of the table.
2479
+ description : str, default=None
2480
+ A description of the table.
2481
+ data_category : str, default=None
2482
+ The data category for the table.
2483
+ hidden : bool, default=False
2484
+ Whether the table is hidden or visible.
2485
+ """
2486
+ import Microsoft.AnalysisServices.Tabular as TOM
2487
+
2488
+ t = TOM.Table()
2489
+ t.Name = name
2490
+ if description is not None:
2491
+ t.Description = description
2492
+ if data_category is not None:
2493
+ t.DataCategory = data_category
2494
+ t.Hidden = hidden
2495
+ self.model.Tables.Add(t)
2496
+
2497
+ def add_calculated_table(
2498
+ self,
2499
+ name: str,
2500
+ expression: str,
2501
+ description: Optional[str] = None,
2502
+ data_category: Optional[str] = None,
2503
+ hidden: Optional[bool] = False,
2504
+ ):
2505
+ """
2506
+ Adds a calculated table to the semantic model.
2507
+
2508
+ Parameters
2509
+ ----------
2510
+ name : str
2511
+ Name of the table.
2512
+ expression : str
2513
+ The DAX expression for the calculated table.
2514
+ description : str, default=None
2515
+ A description of the table.
2516
+ data_category : str, default=None
2517
+ The data category for the table.
2518
+ hidden : bool, default=False
2519
+ Whether the table is hidden or visible.
2520
+ """
2521
+ import Microsoft.AnalysisServices.Tabular as TOM
2522
+
2523
+ par = TOM.Partition()
2524
+ par.Name = name
2525
+
2526
+ parSource = TOM.CalculatedPartitionSource()
2527
+ parSource.Expression = expression
2528
+ par.Source = parSource
2529
+
2530
+ t = TOM.Table()
2531
+ t.Name = name
2532
+ if description is not None:
2533
+ t.Description = description
2534
+ if data_category is not None:
2535
+ t.DataCategory = data_category
2536
+ t.Hidden = hidden
2537
+ t.Partitions.Add(par)
2538
+ self.model.Tables.Add(t)
2539
+
2540
+ def add_field_parameter(self, table_name: str, objects: List[str]):
2541
+ """
2542
+ Adds a field parameter table to the semantic model.
2543
+
2544
+ Parameters
2545
+ ----------
2546
+ table_name : str
2547
+ Name of the table.
2548
+ objects : List[str]
2549
+ The columns/measures to be included in the field parameter.
2550
+ Columns must be specified as such : 'Table Name'[Column Name].
2551
+ Measures may be formatted as '[Measure Name]' or 'Measure Name'.
2552
+ """
2553
+ import Microsoft.AnalysisServices.Tabular as TOM
+
2554
+ if isinstance(objects, str):
2555
+ print(f"The 'objects' parameter must be a list of columns/measures.")
2556
+ return
2557
+ if len(objects) == 1:
2558
+ print(
2559
+ f"There must be more than one object (column/measure) within the objects parameter."
2560
+ )
2561
+ return
2562
+
2563
+ expr = ""
2564
+ i = 0
2565
+ for obj in objects:
2566
+ success = False
2567
+ for m in self.all_measures():
2568
+ if obj == "[" + m.Name + "]" or obj == m.Name:
2569
+ expr = (
2570
+ expr
2571
+ + "\n\t"
2572
+ + '("'
2573
+ + m.Name
2574
+ + '", NAMEOF(['
2575
+ + m.Name
2576
+ + "]), "
2577
+ + str(i)
2578
+ + "),"
2579
+ )
2580
+ success = True
2581
+ for c in self.all_columns():
2582
+ fullObjName = format_dax_object_name(c.Parent.Name, c.Name)
2583
+ if obj == fullObjName or obj == c.Parent.Name + "[" + c.Name + "]":
2584
+ expr = (
2585
+ expr
2586
+ + "\n\t"
2587
+ + '("'
2588
+ + c.Name
2589
+ + '", NAMEOF('
2590
+ + fullObjName
2591
+ + "), "
2592
+ + str(i)
2593
+ + "),"
2594
+ )
2595
+ success = True
2596
+ if not success:
2597
+ print(
2598
+ f"The '{obj}' object was not found in the '{self.dataset}' semantic model."
2599
+ )
2600
+ return
2601
+ else:
2602
+ i += 1
2603
+
2604
+ expr = "{" + expr.rstrip(",") + "\n}"
2605
+
2606
+ self.add_calculated_table(name=table_name, expression=expr)
2607
+
2608
+ col2 = table_name + " Fields"
2609
+ col3 = table_name + " Order"
2610
+
2611
+ self.add_calculated_table_column(
2612
+ table_name=table_name,
2613
+ column_name=table_name,
2614
+ source_column="[Value1]",
2615
+ data_type="String",
2616
+ hidden=False,
2617
+ )
2618
+ self.add_calculated_table_column(
2619
+ table_name=table_name,
2620
+ column_name=col2,
2621
+ source_column="[Value2]",
2622
+ data_type="String",
2623
+ hidden=True,
2624
+ )
2625
+ self.add_calculated_table_column(
2626
+ table_name=table_name,
2627
+ column_name=col3,
2628
+ source_column="[Value3]",
2629
+ data_type="Int64",
2630
+ hidden=True,
2631
+ )
2632
+
2633
+ self.set_extended_property(
2634
2635
+ object=self.model.Tables[table_name].Columns[col2],
2636
+ extended_property_type="Json",
2637
+ name="ParameterMetadata",
2638
+ value='{"version":3,"kind":2}',
2639
+ )
2640
+
2641
+ rcd = TOM.RelatedColumnDetails()
2642
+ gpc = TOM.GroupByColumn()
2643
+ gpc.GroupingColumn = self.model.Tables[table_name].Columns[col2]
2644
+ rcd.GroupByColumns.Add(gpc)
2645
+
2646
+ # Update column properties
2647
+ self.model.Tables[table_name].Columns[col2].SortByColumn = self.model.Tables[
2648
+ table_name
2649
+ ].Columns[col3]
2650
+ self.model.Tables[table_name].Columns[table_name].RelatedColumnDetails = rcd
2651
+
2652
+ self.tables_added.append(table_name)
2653
+
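A hedged call sketch for the field parameter helper above, following the object formats stated in its docstring; the table, column and measure names are placeholders:

tom.add_field_parameter(
    table_name="Product Fields",
    objects=["'DimProduct'[Color]", "'DimProduct'[Category]", "[Total Sales]"],
)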
2654
+ def remove_vertipaq_annotations(self):
2655
+ """
2656
+ Removes the annotations set using the [set_vertipaq_annotations] function.
2657
+ """
2658
+
2659
+ for t in self.model.Tables:
2660
+ for a in t.Annotations:
2661
+ if a.Name.startswith("Vertipaq_"):
2662
+ self.remove_annotation(object=t, name=a.Name)
2663
+ for c in t.Columns:
2664
+ for a in c.Annotations:
2665
+ if a.Name.startswith("Vertipaq_"):
2666
+ self.remove_annotation(object=c, name=a.Name)
2667
+ for h in t.Hierarchies:
2668
+ for a in h.Annotations:
2669
+ if a.Name.startswith("Vertipaq_"):
2670
+ self.remove_annotation(object=h, name=a.Name)
2671
+ for p in t.Partitions:
2672
+ for a in p.Annotations:
2673
+ if a.Name.startswith("Vertipaq_"):
2674
+ self.remove_annotation(object=p, name=a.Name)
2675
+ for r in self.model.Relationships:
2676
+ for a in r.Annotations:
2677
+ if a.Name.startswith("Veripaq_"):
2678
+ self.remove_annotation(object=r, name=a.Name)
2679
+
2680
+ def set_vertipaq_annotations(self):
2681
+ """
2682
+ Saves Vertipaq Analyzer statistics as annotations on objects in the semantic model.
2683
+ """
2684
+
2685
+ dfT = fabric.list_tables(
2686
+ dataset=self.dataset, workspace=self.workspace, extended=True
2687
+ )
2688
+ dfC = fabric.list_columns(
2689
+ dataset=self.dataset, workspace=self.workspace, extended=True
2690
+ )
2691
+ # intList = ['Total Size']#, 'Data Size', 'Dictionary Size', 'Hierarchy Size']
2692
+ dfCSum = dfC.groupby(["Table Name"])["Total Size"].sum().reset_index()
2693
+ dfTable = pd.merge(
2694
+ dfT[["Name", "Type", "Row Count"]],
2695
+ dfCSum[["Table Name", "Total Size"]],
2696
+ left_on="Name",
2697
+ right_on="Table Name",
2698
+ how="inner",
2699
+ )
2700
+ dfP = fabric.list_partitions(
2701
+ dataset=self.dataset, workspace=self.workspace, extended=True
2702
+ )
2703
+ dfP["Records per Segment"] = round(
2704
+ dfP["Record Count"] / dfP["Segment Count"], 2
2705
+ )
2706
+ dfH = fabric.list_hierarchies(
2707
+ dataset=self.dataset, workspace=self.workspace, extended=True
2708
+ )
2709
+ dfR = list_relationships(
2710
+ dataset=self.dataset, workspace=self.workspace, extended=True
2711
+ )
2712
+
2713
+ for t in self.model.Tables:
2714
+ dfT_filt = dfTable[dfTable["Name"] == t.Name]
2715
+ rowCount = str(dfT_filt["Row Count"].iloc[0])
2716
+ totalSize = str(dfT_filt["Total Size"].iloc[0])
2717
+ self.set_annotation(object=t, name="Vertipaq_RowCount", value=rowCount)
2718
+ self.set_annotation(object=t, name="Vertipaq_TableSize", value=totalSize)
2719
+ for c in t.Columns:
2720
+ dfC_filt = dfC[
2721
+ (dfC["Table Name"] == t.Name) & (dfC["Column Name"] == c.Name)
2722
+ ]
2723
+ totalSize = str(dfC_filt["Total Size"].iloc[0])
2724
+ dataSize = str(dfC_filt["Data Size"].iloc[0])
2725
+ dictSize = str(dfC_filt["Dictionary Size"].iloc[0])
2726
+ hierSize = str(dfC_filt["Hierarchy Size"].iloc[0])
2727
+ card = str(dfC_filt["Column Cardinality"].iloc[0])
2728
+ self.set_annotation(
2729
+ object=c, name="Vertipaq_TotalSize", value=totalSize
2730
+ )
2731
+ self.set_annotation(object=c, name="Vertipaq_DataSize", value=dataSize)
2732
+ self.set_annotation(
2733
+ object=c, name="Vertipaq_DictionarySize", value=dictSize
2734
+ )
2735
+ self.set_annotation(
2736
+ object=c, name="Vertipaq_HierarchySize", value=hierSize
2737
+ )
2738
+ self.set_annotation(object=c, name="Vertipaq_Cardinality", value=card)
2739
+ for p in t.Partitions:
2740
+ dfP_filt = dfP[
2741
+ (dfP["Table Name"] == t.Name) & (dfP["Partition Name"] == p.Name)
2742
+ ]
2743
+ recordCount = str(dfP_filt["Record Count"].iloc[0])
2744
+ segmentCount = str(dfP_filt["Segment Count"].iloc[0])
2745
+ rpS = str(dfP_filt["Records per Segment"].iloc[0])
2746
+ self.set_annotation(
2747
+ object=p, name="Vertipaq_RecordCount", value=recordCount
2748
+ )
2749
+ self.set_annotation(
2750
+ object=p, name="Vertipaq_SegmentCount", value=segmentCount
2751
+ )
2752
+ self.set_annotation(
2753
+ object=p, name="Vertipaq_RecordsPerSegment", value=rpS
2754
+ )
2755
+ for h in t.Hierarchies:
2756
+ dfH_filt = dfH[
2757
+ (dfH["Table Name"] == t.Name) & (dfH["Hierarchy Name"] == h.Name)
2758
+ ]
2759
+ usedSize = str(dfH_filt["Used Size"].iloc[0])
2760
+ self.set_annotation(object=h, name="Vertipaq_UsedSize", value=usedSize)
2761
+ for r in self.model.Relationships:
2762
+ dfR_filt = dfR[dfR["Relationship Name"] == r.Name]
2763
+ relSize = str(dfR_filt["Used Size"].iloc[0])
2764
+ self.set_annotation(object=r, name="Vertipaq_UsedSize", value=relSize)
2765
+
2766
+ try:
2767
+ runId = self.get_annotation_value(object=self.model, name="Vertipaq_Run")
2768
+ runId = str(int(runId) + 1)
2769
+ except:
2770
+ runId = "1"
2771
+ self.set_annotation(object=self.model, name="Vertipaq_Run", value=runId)
2772
+
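A hedged sketch of the annotate-then-read round trip supported by the Vertipaq helpers above and below; `tom` and the table name are placeholders:

tom.set_vertipaq_annotations()                 # persist the statistics as annotations
fact = tom.model.Tables["FactSales"]
print(tom.row_count(object=fact))              # value stored in the 'Vertipaq_RowCount' annotation
print(tom.total_size(object=fact))             # value stored in the 'Vertipaq_TotalSize' annotation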
2773
+ def row_count(self, object: Union["TOM.Partition", "TOM.Table"]):
2774
+ """
2775
+ Obtains the row count of a table or partition within a semantic model.
2776
+
2777
+ Parameters
2778
+ ----------
2779
+ object : TOM Object
2780
+ The table/partition object within the semantic model.
2781
+
2782
+ Returns
2783
+ -------
2784
+ int
2785
+ Number of rows within the TOM object.
2786
+ """
2787
+ import Microsoft.AnalysisServices.Tabular as TOM
2788
+
2789
+ objType = object.ObjectType
2790
+
2791
+ if objType == TOM.ObjectType.Table:
2792
+ result = self.get_annotation_value(object=object, name="Vertipaq_RowCount")
2793
+ elif objType == TOM.ObjectType.Partition:
2794
+ result = self.get_annotation_value(
2795
+ object=object, name="Vertipaq_RecordCount"
2796
+ )
2797
+
2798
+ return int(result)
2799
+
2800
+ def records_per_segment(self, object: "TOM.Partition"):
2801
+ """
2802
+ Obtains the records per segment of a partition within a semantic model.
2803
+
2804
+ Parameters
2805
+ ----------
2806
+ object : TOM Object
2807
+ The partition object within the semantic model.
2808
+
2809
+ Returns
2810
+ -------
2811
+ float
2812
+ Number of records per segment within the partition.
2813
+ """
2814
+ import Microsoft.AnalysisServices.Tabular as TOM
2815
+
2816
+ objType = object.ObjectType
2817
+
2818
+ if objType == TOM.ObjectType.Partition:
2819
+ result = self.get_annotation_value(
2820
+ object=object, name="Vertipaq_RecordsPerSegment"
2821
+ )
2822
+
2823
+ return float(result)
2824
+
2825
+ def used_size(self, object: Union["TOM.Hierarchy", "TOM.Relationship"]):
2826
+ """
2827
+ Obtains the used size of a hierarchy or relationship within a semantic model.
2828
+
2829
+ Parameters
2830
+ ----------
2831
+ object : TOM Object
2832
+ The hierarchy/relationship object within the semantic model.
2833
+
2834
+ Returns
2835
+ -------
2836
+ int
2837
+ Used size of the TOM object.
2838
+ """
2839
+ import Microsoft.AnalysisServices.Tabular as TOM
2840
+
2841
+ objType = object.ObjectType
2842
+
2843
+ if objType == TOM.ObjectType.Hierarchy:
2844
+ result = self.get_annotation_value(object=object, name="Vertipaq_UsedSize")
2845
+ elif objType == TOM.ObjectType.Relationship:
2846
+ result = self.get_annotation_value(object=object, name="Vertipaq_UsedSize")
2847
+
2848
+ return int(result)
2849
+
2850
+ def data_size(self, column: "TOM.Column"):
2851
+ """
2852
+ Obtains the data size of a column within a semantic model.
2853
+
2854
+ Parameters
2855
+ ----------
2856
+ column : TOM Object
2857
+ The column object within the semantic model.
2858
+
2859
+ Returns
2860
+ -------
2861
+ int
2862
+ Data size of the TOM column.
2863
+ """
2864
+ import Microsoft.AnalysisServices.Tabular as TOM
2865
+
2866
+ objType = column.ObjectType
2867
+
2868
+ if objType == TOM.ObjectType.Column:
2869
+ result = self.get_annotation_value(object=column, name="Vertipaq_DataSize")
2870
+
2871
+ return int(result)
2872
+
2873
+ def dictionary_size(self, column: "TOM.Column"):
2874
+ """
2875
+ Obtains the dictionary size of a column within a semantic model.
2876
+
2877
+ Parameters
2878
+ ----------
2879
+ column : TOM Object
2880
+ The column object within the semantic model.
2881
+
2882
+ Returns
2883
+ -------
2884
+ int
2885
+ Dictionary size of the TOM column.
2886
+ """
2887
+ import Microsoft.AnalysisServices.Tabular as TOM
2888
+
2889
+ objType = column.ObjectType
2890
+
2891
+ if objType == TOM.ObjectType.Column:
2892
+ result = self.get_annotation_value(
2893
+ object=column, name="Vertipaq_DictionarySize"
2894
+ )
2895
+
2896
+ return int(result)
2897
+
2898
+ def total_size(self, object: Union["TOM.Table", "TOM.Column"]):
2899
+ """
2900
+ Obtains the total size of a table/column within a semantic model.
2901
+
2902
+ Parameters
2903
+ ----------
2904
+ object : TOM Object
2905
+ The table/column object within the semantic model.
2906
+
2907
+ Returns
2908
+ -------
2909
+ int
2910
+ Total size of the TOM table/column.
2911
+ """
2912
+ import Microsoft.AnalysisServices.Tabular as TOM
2913
+
2914
+ objType = object.ObjectType
2915
+
2916
+ if objType == TOM.ObjectType.Column:
2917
+ result = self.get_annotation_value(object=object, name="Vertipaq_TotalSize")
2918
+ elif objType == TOM.ObjectType.Table:
2919
+ result = self.get_annotation_value(object=object, name="Vertipaq_TotalSize")
2920
+
2921
+ return int(result)
2922
+
2923
+ def cardinality(self, column: "TOM.Column"):
2924
+ """
2925
+ Obtains the cardinality of a column within a semantic model.
2926
+
2927
+ Parameters
2928
+ ----------
2929
+ column : TOM Object
2930
+ The column object within the semantic model.
2931
+
2932
+ Returns
2933
+ -------
2934
+ int
2935
+ Cardinality of the TOM column.
2936
+ """
2937
+ import Microsoft.AnalysisServices.Tabular as TOM
2938
+
2939
+ objType = column.ObjectType
2940
+
2941
+ if objType == TOM.ObjectType.Column:
2942
+ result = self.get_annotation_value(
2943
+ object=column, name="Vertipaq_Cardinality"
2944
+ )
2945
+
2946
+ return int(result)
2947
+
2948
+ def depends_on(self, object, dependencies: pd.DataFrame):
2949
+ """
2950
+ Obtains the objects on which the specified object depends.
2951
+
2952
+ Parameters
2953
+ ----------
2954
+ object : TOM Object
2955
+ The TOM object within the semantic model.
2956
+ dependencies : pandas.DataFrame
2957
+ A pandas dataframe with the output of the 'get_model_calc_dependencies' function.
2958
+
2959
+ Returns
2960
+ -------
2961
+ Microsoft.AnalysisServices.Tabular.TableCollection, Microsoft.AnalysisServices.Tabular.ColumnCollection, Microsoft.AnalysisServices.Tabular.MeasureCollection
2962
+ Objects on which the specified object depends.
2963
+ """
2964
+ import Microsoft.AnalysisServices.Tabular as TOM
2965
+
2966
+ objType = object.ObjectType
2967
+ objName = object.Name
2968
+ objParentName = object.Parent.Name
2969
+
2970
+ if objType == TOM.ObjectType.Table:
2971
+ objParentName = objName
2972
+
2973
+ fil = dependencies[
2974
+ (dependencies["Object Type"] == objType)
2975
+ & (dependencies["Table Name"] == objParentName)
2976
+ & (dependencies["Object Name"] == objName)
2977
+ ]
2978
+ meas = (
2979
+ fil[fil["Referenced Object Type"] == "Measure"]["Referenced Object"]
2980
+ .unique()
2981
+ .tolist()
2982
+ )
2983
+ cols = (
2984
+ fil[fil["Referenced Object Type"] == "Column"][
2985
+ "Referenced Full Object Name"
2986
+ ]
2987
+ .unique()
2988
+ .tolist()
2989
+ )
2990
+ tbls = (
2991
+ fil[fil["Referenced Object Type"] == "Table"]["Referenced Table"]
2992
+ .unique()
2993
+ .tolist()
2994
+ )
2995
+ for m in self.all_measures():
2996
+ if m.Name in meas:
2997
+ yield m
2998
+ for c in self.all_columns():
2999
+ if format_dax_object_name(c.Parent.Name, c.Name) in cols:
3000
+ yield c
3001
+ for t in self.model.Tables:
3002
+ if t.Name in tbls:
3003
+ yield t
3004
+
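A hedged sketch showing `depends_on` driven by the dependency dataframe, under the same assumptions as earlier: `tom` is an instance of this wrapper class and the `get_model_calc_dependencies` call is illustrative:

dep = get_model_calc_dependencies(dataset="MyModel", workspace="MyWorkspace")  # assumed signature
measure = tom.model.Tables["FactSales"].Measures["Total Sales"]
for obj in tom.depends_on(object=measure, dependencies=dep):
    print(obj.ObjectType, obj.Name)  # tables, columns and measures the DAX expression relies on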
3005
+ def referenced_by(self, object, dependencies: pd.DataFrame):
3006
+ """
3007
+ Obtains the objects which reference the specified object.
3008
+
3009
+ Parameters
3010
+ ----------
3011
+ object : TOM Object
3012
+ The TOM object within the semantic model.
3013
+ dependencies : pandas.DataFrame
3014
+ A pandas dataframe with the output of the 'get_model_calc_dependencies' function.
3015
+
3016
+ Returns
3017
+ -------
3018
+ Microsoft.AnalysisServices.Tabular.TableCollection, Microsoft.AnalysisServices.Tabular.ColumnCollection, Microsoft.AnalysisServices.Tabular.MeasureCollection
3019
+ Objects which reference the specified object.
3020
+ """
3021
+ import Microsoft.AnalysisServices.Tabular as TOM
3022
+
3023
+ objType = object.ObjectType
3024
+ objName = object.Name
3025
+ objParentName = object.Parent.Name
3026
+
3027
+ if objType == TOM.ObjectType.Table:
3028
+ objParentName = objName
3029
+
3030
+ fil = dependencies[
3031
+ (dependencies["Referenced Object Type"] == objType)
3032
+ & (dependencies["Referenced Table"] == objParentName)
3033
+ & (dependencies["Referenced Object"] == objName)
3034
+ ]
3035
+ meas = fil[fil["Object Type"] == "Measure"]["Object Name"].unique().tolist()
3036
+ cols = (
3037
+ fil[fil["Object Type"].isin(["Column", "Calc Column"])]["Full Object Name"]
3038
+ .unique()
3039
+ .tolist()
3040
+ )
3041
+ tbls = (
3042
+ fil[fil["Object Type"].isin(["Table", "Calc Table"])]["Table Name"]
3043
+ .unique()
3044
+ .tolist()
3045
+ )
3046
+ for m in self.all_measures():
3047
+ if m.Name in meas:
3048
+ yield m
3049
+ for c in self.all_columns():
3050
+ if format_dax_object_name(c.Parent.Name, c.Name) in cols:
3051
+ yield c
3052
+ for t in self.model.Tables:
3053
+ if t.Name in tbls:
3054
+ yield t
3055
+
3056
+ def fully_qualified_measures(
3057
+ self, object: "TOM.Measure", dependencies: pd.DataFrame
3058
+ ):
3059
+ """
3060
+ Obtains all fully qualified measure references for a given object.
3061
+
3062
+ Parameters
3063
+ ----------
3064
+ object : TOM Object
3065
+ The TOM object within the semantic model.
3066
+ dependencies : pandas.DataFrame
3067
+ A pandas dataframe with the output of the 'get_model_calc_dependencies' function.
3068
+
3069
+ Returns
3070
+ -------
3071
+ Microsoft.AnalysisServices.Tabular.MeasureCollection
3072
+ All fully qualified measure references for a given object.
3073
+ """
3074
+ import Microsoft.AnalysisServices.Tabular as TOM
3075
+
3076
+ for obj in self.depends_on(object=object, dependencies=dependencies):
3077
+ if obj.ObjectType == TOM.ObjectType.Measure:
3078
+ if (f"{obj.Parent.Name}[{obj.Name}]" in object.Expression) or (
3079
+ format_dax_object_name(obj.Parent.Name, obj.Name)
3080
+ in object.Expression
3081
+ ):
3082
+ yield obj
3083
+
3084
+ def unqualified_columns(self, object: "TOM.Column", dependencies: pd.DataFrame):
3085
+ """
3086
+ Obtains all unqualified column references for a given object.
3087
+
3088
+ Parameters
3089
+ ----------
3090
+ object : TOM Object
3091
+ The TOM object within the semantic model.
3092
+ dependencies : pandas.DataFrame
3093
+ A pandas dataframe with the output of the 'get_model_calc_dependencies' function.
3094
+
3095
+ Returns
3096
+ -------
3097
+ Microsoft.AnalysisServices.Tabular.ColumnCollection
3098
+ All unqualified column references for a given object.
3099
+ """
3100
+ import Microsoft.AnalysisServices.Tabular as TOM
3101
+
3102
+ def create_pattern(a, b):
3103
+ return r"(?<!" + a + "\[)(?<!" + a + "'\[)" + b
3104
+
3105
+ for obj in self.depends_on(object=object, dependencies=dependencies):
3106
+ if obj.ObjectType == TOM.ObjectType.Column:
3107
+ if (
3108
+ re.search(
3109
+ create_pattern(obj.Parent.Name, obj.Name), object.Expression
3110
+ )
3111
+ is not None
3112
+ ):
3113
+ yield obj
3114
+
3115
+ def is_direct_lake_using_view(self):
3116
+ """
3117
+ Identifies whether a semantic model is in Direct Lake mode and uses views from the lakehouse.
3118
+
3119
+ Parameters
3120
+ ----------
3121
+
3122
+ Returns
3123
+ -------
3124
+ bool
3125
+ Indicates whether the semantic model is in Direct Lake mode and uses views from the lakehouse.
3126
+ """
3127
+
3128
+ usingView = False
3129
+
3130
+ if self.is_direct_lake():
3131
+ df = check_fallback_reason(dataset=self.dataset, workspace=self.workspace)
3132
+ df_filt = df[df["FallbackReasonID"] == 2]
3133
+
3134
+ if len(df_filt) > 0:
3135
+ usingView = True
3136
+
3137
+ return usingView
3138
+
3139
+ def has_incremental_refresh_policy(self, table_name: str):
3140
+ """
3141
+ Identifies whether a table has an incremental refresh policy.
3142
+
3143
+ Parameters
3144
+ ----------
3145
+ table_name : str
3146
+ Name of the table.
3147
+
3148
+ Returns
3149
+ -------
3150
+ bool
3151
+ An indicator whether a table has an incremental refresh policy.
3152
+ """
3153
+
3154
+ hasRP = False
3155
+ rp = self.model.Tables[table_name].RefreshPolicy
3156
+
3157
+ if rp is not None:
3158
+ hasRP = True
3159
+
3160
+ return hasRP
3161
+
3162
+ def show_incremental_refresh_policy(self, table_name: str):
3163
+ """
3164
+ Prints the incremental refresh policy for a table.
3165
+
3166
+ Parameters
3167
+ ----------
3168
+ table_name : str
3169
+ Name of the table.
3170
+ """
3171
+ import Microsoft.AnalysisServices.Tabular as TOM
+
3172
+ rp = self.model.Tables[table_name].RefreshPolicy
3173
+
3174
+ if rp is None:
3175
+ print(
3176
+ f"The '{table_name}' table in the '{self.dataset}' semantic model within the '{self.workspace}' workspace does not have an incremental refresh policy."
3177
+ )
3178
+ else:
3179
+ print(f"Table Name: {table_name}")
3180
+ rwGran = str(rp.RollingWindowGranularity).lower()
3181
+ icGran = str(rp.IncrementalGranularity).lower()
3182
+ if rp.RollingWindowPeriods > 1:
3183
+ print(
3184
+ f"Archive data starting {start_bold}{rp.RollingWindowPeriods} {rwGran}s{end_bold} before refresh date."
3185
+ )
3186
+ else:
3187
+ print(
3188
+ f"Archive data starting {start_bold}{rp.RollingWindowPeriods} {rwGran}{end_bold} before refresh date."
3189
+ )
3190
+ if rp.IncrementalPeriods > 1:
3191
+ print(
3192
+ f"Incrementally refresh data {start_bold}{rp.IncrementalPeriods} {icGran}s{end_bold} before refresh date."
3193
+ )
3194
+ else:
3195
+ print(
3196
+ f"Incrementally refresh data {start_bold}{rp.IncrementalPeriods} {icGran}{end_bold} before refresh date."
3197
+ )
3198
+
3199
+ if rp.Mode == TOM.RefreshPolicyMode.Hybrid:
3200
+ print(
3201
+ f"{checked} Get the latest data in real time with DirectQuery (Premium only)"
3202
+ )
3203
+ else:
3204
+ print(
3205
+ f"{unchecked} Get the latest data in real time with DirectQuery (Premium only)"
3206
+ )
3207
+ if rp.IncrementalPeriodsOffset == -1:
3208
+ print(f"{checked} Only refresh complete days")
3209
+ else:
3210
+ print(f"{unchecked} Only refresh complete days")
3211
+ if len(rp.PollingExpression) > 0:
3212
+ pattern = r"\[([^\]]+)\]"
3213
+ match = re.search(pattern, rp.PollingExpression)
3214
+ if match:
3215
+ col = match[0][1:-1]
3216
+ fullCol = format_dax_object_name(table_name, col)
3217
+ print(
3218
+ f"{checked} Detect data changes: {start_bold}{fullCol}{end_bold}"
3219
+ )
3220
+ else:
3221
+ print(f"{unchecked} Detect data changes")
3222
+
3223
+ def update_incremental_refresh_policy(
3224
+ self,
3225
+ table_name: str,
3226
+ incremental_granularity: str,
3227
+ incremental_periods: int,
3228
+ rolling_window_granularity: str,
3229
+ rolling_window_periods: int,
3230
+ only_refresh_complete_days: Optional[bool] = False,
3231
+ detect_data_changes_column: Optional[str] = None,
3232
+ ):
3233
+ """
3234
+ Updates the incremental refresh policy for a table within a semantic model.
3235
+
3236
+ Parameters
3237
+ ----------
3238
+ table_name : str
3239
+ Name of the table.
3240
+ incremental_granularity : str
3241
+ Granularity of the (most recent) incremental refresh range.
3242
+ incremental_periods : int
3243
+ Number of periods for the incremental refresh range.
3244
+ rolling_window_granularity : str
3245
+ Target granularity of the rolling window for the whole semantic model.
3246
+ rolling_window_periods : int
3247
+ Number of periods for the rolling window for the whole semantic model.
3248
+ only_refresh_complete_days : bool, default=False
3249
+ Whether to refresh only complete periods (e.g. full days).
3250
+ detect_data_changes_column : str, default=None
3251
+ The column to use for detecting data changes.
3252
+ Defaults to None which resolves to not detecting data changes.
3253
+ """
3254
+ import Microsoft.AnalysisServices.Tabular as TOM
3255
+ import System
3256
+
3257
+ if not self.has_incremental_refresh_policy(table_name=table_name):
3258
+ print(
3259
+ f"The '{table_name}' table does not have an incremental refresh policy."
3260
+ )
3261
+ return
3262
+
3263
+ incGran = ["Day", "Month", "Quarter", "Year"]
3264
+
3265
+ incremental_granularity = incremental_granularity.capitalize()
3266
+ rolling_window_granularity = rolling_window_granularity.capitalize()
3267
+
3268
+ if incremental_granularity not in incGran:
3269
+ print(
3270
+ f"{icons.red_dot} Invalid 'incremental_granularity' value. Please choose from the following options: {incGran}."
3271
+ )
3272
+ return
3273
+ if rolling_window_granularity not in incGran:
3274
+ print(
3275
+ f"{icons.red_dot} Invalid 'rolling_window_granularity' value. Please choose from the following options: {incGran}."
3276
+ )
3277
+ return
3278
+
3279
+ if rolling_window_periods < 1:
3280
+ print(
3281
+ f"{icons.red_dot} Invalid 'rolling_window_periods' value. Must be a value greater than 0."
3282
+ )
3283
+ return
3284
+ if incremental_periods < 1:
3285
+ print(
3286
+ f"{icons.red_dot} Invalid 'incremental_periods' value. Must be a value greater than 0."
3287
+ )
3288
+ return
3289
+
3290
+ t = self.model.Tables[table_name]
3291
+
3292
+ if detect_data_changes_column is not None:
3293
+ dc = t.Columns[detect_data_changes_column]
3294
+
3295
+ if dc.DataType != TOM.DataType.DateTime:
3296
+ print(
3297
+ f"{icons.red_dot} Invalid 'detect_data_changes_column' parameter. This column must be of DateTime data type."
3298
+ )
3299
+ return
3300
+
3301
+ rp = TOM.BasicRefreshPolicy()
3302
+ rp.IncrementalPeriods = incremental_periods
3303
+ rp.IncrementalGranularity = System.Enum.Parse(
3304
+ TOM.RefreshGranularityType, incremental_granularity
3305
+ )
3306
+ rp.RollingWindowPeriods = rolling_window_periods
3307
+ rp.RollingWindowGranularity = System.Enum.Parse(
3308
+ TOM.RefreshGranularityType, rolling_window_granularity
3309
+ )
3310
+ rp.SourceExpression = t.RefreshPolicy.SourceExpression
3311
+
3312
+ if only_refresh_complete_days:
3313
+ rp.IncrementalPeriodsOffset = -1
3314
+ else:
3315
+ rp.IncrementalPeriodsOffset = 0
3316
+
3317
+ if detect_data_changes_column is not None:
3318
+ fullDC = format_dax_object_name(table_name, detect_data_changes_column)
3319
+ ddcExpr = f"let Max{detect_data_changes_column} = List.Max({fullDC}), accountForNull = if Max{detect_data_changes_column} = null then #datetime(1901, 01, 01, 00, 00, 00) else Max{detect_data_changes_column} in accountForNull"
3320
+ rp.PollingExpression = ddcExpr
3321
+ else:
3322
+ rp.PollingExpression = None
3323
+
3324
+ t.RefreshPolicy = rp
3325
+
3326
+ self.show_incremental_refresh_policy(table_name=table_name)
3327
+
3328
+ def add_incremental_refresh_policy(
3329
+ self,
3330
+ table_name: str,
3331
+ column_name: str,
3332
+ start_date: str,
3333
+ end_date: str,
3334
+ incremental_granularity: str,
3335
+ incremental_periods: int,
3336
+ rolling_window_granularity: str,
3337
+ rolling_window_periods: int,
3338
+ only_refresh_complete_days: Optional[bool] = False,
3339
+ detect_data_changes_column: Optional[str] = None,
3340
+ ):
3341
+ """
3342
+ Adds an incremental refresh policy for a table within a semantic model.
3343
+
3344
+ Parameters
3345
+ ----------
3346
+ table_name : str
3347
+ Name of the table.
3348
+ column_name : str
3349
+ The DateTime column to be used for the RangeStart and RangeEnd parameters.
3350
+ start_date : str
3351
+ The date to be used for the RangeStart parameter.
3352
+ end_date : str
3353
+ The date to be used for the RangeEnd parameter.
3354
+ incremental_granularity : str
3355
+ Granularity of the (most recent) incremental refresh range.
3356
+ incremental_periods : int
3357
+ Number of periods for the incremental refresh range.
3358
+ rolling_window_granularity : str
3359
+ Target granularity of the rolling window for the whole semantic model.
3360
+ rolling_window_periods : int
3361
+ Number of periods for the rolling window for the whole semantic model.
3362
+ only_refresh_complete_days : bool, default=False
3363
+ Whether to refresh only complete periods (e.g. full days).
3364
+ detect_data_changes_column : str, default=None
3365
+ The column to use for detecting data changes.
3366
+ Defaults to None which resolves to not detecting data changes.
3367
+ """
3368
+ import Microsoft.AnalysisServices.Tabular as TOM
3369
+ import System
3370
+
3371
+ # https://learn.microsoft.com/en-us/power-bi/connect-data/incremental-refresh-configure
3372
+
3373
+ incGran = ["Day", "Month", "Quarter", "Year"]
3374
+
3375
+         incremental_granularity = incremental_granularity.capitalize()
+         rolling_window_granularity = rolling_window_granularity.capitalize()
+
+         if incremental_granularity not in incGran:
+             print(
+                 f"{icons.red_dot} Invalid 'incremental_granularity' value. Please choose from the following options: {incGran}."
+             )
+             return
+         if rolling_window_granularity not in incGran:
+             print(
+                 f"{icons.red_dot} Invalid 'rolling_window_granularity' value. Please choose from the following options: {incGran}."
+             )
+             return
+
+         if rolling_window_periods < 1:
+             print(
+                 f"{icons.red_dot} Invalid 'rolling_window_periods' value. Must be a value greater than 0."
+             )
+             return
+         if incremental_periods < 1:
+             print(
+                 f"{icons.red_dot} Invalid 'incremental_periods' value. Must be a value greater than 0."
+             )
+             return
+
+         date_format = "%m/%d/%Y"
+
+         date_obj_start = datetime.strptime(start_date, date_format)
+         start_year = date_obj_start.year
+         start_month = date_obj_start.month
+         start_day = date_obj_start.day
+
+         date_obj_end = datetime.strptime(end_date, date_format)
+         end_year = date_obj_end.year
+         end_month = date_obj_end.month
+         end_day = date_obj_end.day
+
+         if date_obj_end <= date_obj_start:
+             print(
+                 f"{icons.red_dot} Invalid 'start_date' or 'end_date'. The 'end_date' must be after the 'start_date'."
+             )
+             return
+
+         t = self.model.Tables[table_name]
+
+         c = t.Columns[column_name]
+         fcName = format_dax_object_name(table_name, column_name)
+         dType = c.DataType
+
+         if dType != TOM.DataType.DateTime:
+             print(
+                 f"{icons.red_dot} The {fcName} column is of '{dType}' data type. The column chosen must be of DateTime data type."
+             )
+             return
+
+         if detect_data_changes_column is not None:
+             dc = t.Columns[detect_data_changes_column]
+             dcType = dc.DataType
+
+             if dcType != TOM.DataType.DateTime:
+                 print(
+                     f"{icons.red_dot} Invalid 'detect_data_changes_column' parameter. This column must be of DateTime data type."
+                 )
+                 return
+
+         # Start changes:
+
+         # Update partition expression
+         i = 0
+         for p in t.Partitions:
+             if p.SourceType != TOM.PartitionSourceType.M:
+                 print(
+                     f"{icons.red_dot} Invalid partition source type. Incremental refresh can only be set up if the table's partition is an M-partition."
+                 )
+                 return
+             elif i == 0:
+                 text = p.Expression
+                 text = text.rstrip()
+
+                 ind = text.rfind(" ") + 1
+                 obj = text[ind:]
+                 pattern = r"in\s*[^ ]*"
+                 matches = list(re.finditer(pattern, text))
+
+                 if matches:
+                     last_match = matches[-1]
+                     text_before_last_match = text[: last_match.start()]
+
+                     print(text_before_last_match)
+                 else:
+                     print(f"{icons.red_dot} Invalid M-partition expression.")
+                     return
+
+                 endExpr = f'#"Filtered Rows IR" = Table.SelectRows({obj}, each [{column_name}] >= RangeStart and [{column_name}] <= RangeEnd)\n#"Filtered Rows IR"'
+                 finalExpr = text_before_last_match + endExpr
+
+                 p.Expression = finalExpr
+             i += 1
+
+         # Add expressions
+         self.add_expression(
+             name="RangeStart",
+             expression=f'datetime({start_year}, {start_month}, {start_day}, 0, 0, 0) meta [IsParameterQuery=true, Type="DateTime", IsParameterQueryRequired=true]',
+         )
+         self.add_expression(
+             name="RangeEnd",
+             expression=f'datetime({end_year}, {end_month}, {end_day}, 0, 0, 0) meta [IsParameterQuery=true, Type="DateTime", IsParameterQueryRequired=true]',
+         )
+
+         # Update properties
+         rp = TOM.BasicRefreshPolicy()
+         rp.IncrementalPeriods = incremental_periods
+         rp.IncrementalGranularity = System.Enum.Parse(
+             TOM.RefreshGranularityType, incremental_granularity
+         )
+         rp.RollingWindowPeriods = rolling_window_periods
+         rp.RollingWindowGranularity = System.Enum.Parse(
+             TOM.RefreshGranularityType, rolling_window_granularity
+         )
+
+         if only_refresh_complete_days:
+             rp.IncrementalPeriodsOffset = -1
+         else:
+             rp.IncrementalPeriodsOffset = 0
+
+         if detect_data_changes_column is not None:
+             fullDC = format_dax_object_name(table_name, detect_data_changes_column)
+             ddcExpr = f"let Max{detect_data_changes_column} = List.Max({fullDC}), accountForNull = if Max{detect_data_changes_column} = null then #datetime(1901, 01, 01, 00, 00, 00) else Max{detect_data_changes_column} in accountForNull"
+             rp.PollingExpression = ddcExpr
+
+         t.RefreshPolicy = rp
+
+         self.show_incremental_refresh_policy(table_name=table_name)
+
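For orientation, a call to the incremental-refresh setup method whose body ends above might look like the sketch below. This is not part of the package: the dataset, workspace, table, and column names are placeholders, the method name (set_incremental_refresh_policy) is assumed from the body shown here since its def line appears earlier in the file, and the keyword arguments mirror the parameter names used above.

# Illustrative sketch only; all names below are assumptions, not package content.
from sempy_labs._tom import connect_semantic_model

with connect_semantic_model(dataset="Sales Model", readonly=False, workspace="My Workspace") as tom:
    tom.set_incremental_refresh_policy(      # method name assumed; its def line is earlier in _tom.py
        table_name="FactSales",
        column_name="OrderDate",             # must be a DateTime column
        start_date="1/1/2020",               # parsed with the "%m/%d/%Y" format above
        end_date="12/31/2025",
        incremental_granularity="Month",
        incremental_periods=12,
        rolling_window_granularity="Year",
        rolling_window_periods=5,
    )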
+     def apply_refresh_policy(
+         self,
+         table_name: str,
+         effective_date: Optional[datetime] = None,
+         refresh: Optional[bool] = True,
+         max_parallelism: Optional[int] = 0,
+     ):
+         """
+         Applies the incremental refresh policy for a table within a semantic model.
+
+         Parameters
+         ----------
+         table_name : str
+             Name of the table.
+         effective_date : DateTime, default=None
+             The effective date that is used when calculating the partitioning scheme.
+         refresh : bool, default=True
+             Whether to refresh the table's partitions; by default, the refresh is performed.
+         max_parallelism : int, default=0
+             The degree of parallelism during the refresh execution.
+         """
+
+         self.model.Tables[table_name].ApplyRefreshPolicy(
+             effectiveDate=effective_date,
+             refresh=refresh,
+             maxParallelism=max_parallelism,
+         )
+
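A hedged usage sketch for apply_refresh_policy follows; the dataset, workspace, and table names are placeholders.

from sempy_labs._tom import connect_semantic_model

with connect_semantic_model(dataset="Sales Model", readonly=False, workspace="My Workspace") as tom:
    # Partition the table according to its refresh policy and refresh it with up to 4 parallel jobs.
    tom.apply_refresh_policy(table_name="FactSales", refresh=True, max_parallelism=4)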
+     def set_data_coverage_definition(
+         self, table_name: str, partition_name: str, expression: str
+     ):
+         """
+         Sets the data coverage definition for a partition.
+
+         Parameters
+         ----------
+         table_name : str
+             Name of the table.
+         partition_name : str
+             Name of the partition.
+         expression : str
+             DAX expression containing the logic for the data coverage definition.
+         """
+         import Microsoft.AnalysisServices.Tabular as TOM
+
+         doc = "https://learn.microsoft.com/analysis-services/tom/table-partitions?view=asallproducts-allversions"
+
+         t = self.model.Tables[table_name]
+         p = t.Partitions[partition_name]
+
+         ht = self.is_hybrid_table(table_name=table_name)
+
+         if not ht:
+             print(
+                 f"The data coverage definition property is only applicable to hybrid tables. See the documentation: {doc}."
+             )
+             return
+         if p.Mode != TOM.ModeType.DirectQuery:
+             print(
+                 f"The data coverage definition property is only applicable to the DirectQuery partition of a hybrid table. See the documentation: {doc}."
+             )
+             return
+
+         dcd = TOM.DataCoverageDefinition()
+         dcd.Expression = expression
+         p.DataCoverageDefinition = dcd
+
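A sketch of how set_data_coverage_definition might be called is shown below; it assumes a hybrid table whose DirectQuery partition is named as indicated, and the DAX predicate is purely illustrative.

from sempy_labs._tom import connect_semantic_model

with connect_semantic_model(dataset="Sales Model", readonly=False, workspace="My Workspace") as tom:
    tom.set_data_coverage_definition(
        table_name="FactSales",                           # assumed to be a hybrid table
        partition_name="FactSales-DirectQuery",           # assumed name of its DirectQuery partition
        expression="FactSales[OrderDateKey] < 20200101",  # illustrative DAX predicate
    )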
+     def set_encoding_hint(self, table_name: str, column_name: str, value: str):
+         """
+         Sets the encoding hint for a column.
+
+         Parameters
+         ----------
+         table_name : str
+             Name of the table.
+         column_name : str
+             Name of the column.
+         value : str
+             Encoding hint value.
+             `Encoding hint valid values <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.encodinghinttype?view=analysisservices-dotnet>`_
+         """
+         import Microsoft.AnalysisServices.Tabular as TOM
+         import System
+
+         values = ["Default", "Hash", "Value"]
+         value = value.capitalize()
+
+         if value not in values:
+             print(
+                 f"{icons.red_dot} Invalid encoding hint value. Please choose from these options: {values}."
+             )
+             return
+
+         self.model.Tables[table_name].Columns[column_name].EncodingHint = (
+             System.Enum.Parse(TOM.EncodingHintType, value)
+         )
+
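A minimal sketch of set_encoding_hint, with placeholder names; "Value" is one of the three accepted options listed above.

from sempy_labs._tom import connect_semantic_model

with connect_semantic_model(dataset="Sales Model", readonly=False, workspace="My Workspace") as tom:
    # Hint that the numeric column should use value encoding.
    tom.set_encoding_hint(table_name="FactSales", column_name="SalesAmount", value="Value")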
+     def set_data_type(self, table_name: str, column_name: str, value: str):
+         """
+         Sets the data type for a column.
+
+         Parameters
+         ----------
+         table_name : str
+             Name of the table.
+         column_name : str
+             Name of the column.
+         value : str
+             The data type.
+             `Data type valid values <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.datatype?view=analysisservices-dotnet>`_
+         """
+         import Microsoft.AnalysisServices.Tabular as TOM
+         import System
+
+         values = [
+             "Binary",
+             "Boolean",
+             "DateTime",
+             "Decimal",
+             "Double",
+             "Int64",
+             "String",
+         ]
+
+         value = value.replace(" ", "").capitalize()
+         if value == "Datetime":
+             value = "DateTime"
+         elif value.startswith("Int"):
+             value = "Int64"
+         elif value.startswith("Bool"):
+             value = "Boolean"
+
+         if value not in values:
+             print(
+                 f"{icons.red_dot} Invalid data type. Please choose from these options: {values}."
+             )
+             return
+
+         self.model.Tables[table_name].Columns[column_name].DataType = System.Enum.Parse(
+             TOM.DataType, value
+         )
+
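A minimal sketch of set_data_type, again with placeholder names; the value is normalized against the list shown above.

from sempy_labs._tom import connect_semantic_model

with connect_semantic_model(dataset="Sales Model", readonly=False, workspace="My Workspace") as tom:
    # Change the column's data type to a 64-bit integer.
    tom.set_data_type(table_name="FactSales", column_name="Quantity", value="Int64")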
+     def add_time_intelligence(
+         self, measure_name: str, date_table: str, time_intel: Union[str, List[str]]
+     ):
+         """
+         Adds time intelligence measures.
+
+         Parameters
+         ----------
+         measure_name : str
+             Name of the measure.
+         date_table : str
+             Name of the date table.
+         time_intel : str, List[str]
+             Time intelligence measures to create (e.g. MTD, YTD, QTD).
+         """
+
+         table_name = None
+         time_intel_options = ["MTD", "QTD", "YTD"]
+
+         if isinstance(time_intel, str):
+             time_intel = [time_intel]
+
+         # Normalize and validate the time intelligence variations
+         time_intel = [t.upper() for t in time_intel]
+         for t in time_intel:
+             if t not in time_intel_options:
+                 print(
+                     f"The '{t}' time intelligence variation is not supported. Valid options: {time_intel_options}."
+                 )
+                 return
+
+         # Validate measure and extract table name
+         for m in self.all_measures():
+             if m.Name == measure_name:
+                 table_name = m.Parent.Name
+
+         if table_name is None:
+             print(
+                 f"The '{measure_name}' is not a valid measure in the '{self.dataset}' semantic model within the '{self.workspace}' workspace."
+             )
+             return
+
+         # Validate date table
+         if not self.is_date_table(date_table):
+             print(
+                 f"{icons.red_dot} The '{date_table}' table is not a valid date table in the '{self.dataset}' semantic model within the '{self.workspace}' workspace."
+             )
+             return
+
+         # Extract date key from date table
+         for c in self.all_columns():
+             if c.Parent.Name == date_table and c.IsKey:
+                 date_key = c.Name
+
+         # Create the new time intelligence measures
+         for t in time_intel:
+             expr = f"CALCULATE([{measure_name}],DATES{t}('{date_table}'[{date_key}]))"
+             new_meas_name = f"{measure_name} {t}"
+             self.add_measure(
+                 table_name=table_name,
+                 measure_name=new_meas_name,
+                 expression=expr,
+             )
+
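A hedged sketch of add_time_intelligence; the measure and date-table names are placeholders, and the call assumes a marked date table with a key column, as the validation above requires.

from sempy_labs._tom import connect_semantic_model

with connect_semantic_model(dataset="Sales Model", readonly=False, workspace="My Workspace") as tom:
    # Creates measures such as "Total Sales MTD", "Total Sales QTD", "Total Sales YTD".
    tom.add_time_intelligence(
        measure_name="Total Sales",
        date_table="Date",
        time_intel=["MTD", "QTD", "YTD"],
    )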
+     def close(self):
+         if not self.readonly and self.model is not None:
+             self.model.SaveChanges()
+
+             if len(self.tables_added) > 0:
+                 refresh_semantic_model(
+                     dataset=self.dataset,
+                     tables=self.tables_added,
+                     workspace=self.workspace,
+                 )
+             self.model = None
+
+         self.tom_server.Dispose()
+
+
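Because close() above only calls SaveChanges() for a read/write connection, writes made through the wrapper are persisted when the context manager exits; the following is a sketch with placeholder names, using the add_measure method referenced earlier in this file.

from sempy_labs._tom import connect_semantic_model

with connect_semantic_model(dataset="Sales Model", readonly=False, workspace="My Workspace") as tom:
    tom.add_measure(
        table_name="FactSales",
        measure_name="Sales Amount",
        expression="SUM(FactSales[SalesAmount])",  # illustrative DAX expression
    )
# Leaving the block calls close(): changes are saved and any newly added tables are refreshed.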
+ @log
+ @contextmanager
+ def connect_semantic_model(
+     dataset: str, readonly: bool = True, workspace: Optional[str] = None
+ ):
+     """
+     Connects to the Tabular Object Model (TOM) within a semantic model.
+
+     Parameters
+     ----------
+     dataset : str
+         Name of the semantic model.
+     readonly : bool, default=True
+         Whether the connection is read-only or read/write. Setting this to False enables read/write, which saves the changes made back to the server.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     TOMWrapper
+         A connection to the semantic model's Tabular Object Model.
+     """
+
+     # initialize .NET to make sure System and Microsoft.AnalysisServices.Tabular are defined
+     sempy.fabric._client._utils._init_analysis_services()
+
+     if workspace is None:
+         workspace_id = fabric.get_workspace_id()
+         workspace = fabric.resolve_workspace_name(workspace_id)
+
+     tw = TOMWrapper(dataset=dataset, workspace=workspace, readonly=readonly)
+     try:
+         yield tw
+     finally:
+         tw.close()
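Finally, a read-only connection sketch (placeholder dataset and workspace names) that iterates the model's measures via the all_measures helper used earlier in this file:

from sempy_labs._tom import connect_semantic_model

with connect_semantic_model(dataset="Sales Model", readonly=True, workspace="My Workspace") as tom:
    for m in tom.all_measures():
        print(m.Name)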