semantic-link-labs 0.5.0__tar.gz → 0.7.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of semantic-link-labs might be problematic.

Files changed (162)
  1. semantic_link_labs-0.7.0/PKG-INFO +148 -0
  2. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/README.md +10 -7
  3. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/docs/source/conf.py +1 -1
  4. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/environment.yml +2 -1
  5. semantic_link_labs-0.7.0/notebooks/Best Practice Analyzer Report.ipynb +1 -0
  6. semantic_link_labs-0.7.0/notebooks/Migration to Direct Lake.ipynb +1 -0
  7. semantic_link_labs-0.7.0/notebooks/Model Optimization.ipynb +1 -0
  8. semantic_link_labs-0.7.0/notebooks/Semantic Model Refresh.ipynb +1 -0
  9. semantic_link_labs-0.7.0/notebooks/Tabular Object Model.ipynb +1 -0
  10. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/pyproject.toml +13 -5
  11. semantic_link_labs-0.7.0/src/semantic_link_labs.egg-info/PKG-INFO +148 -0
  12. semantic_link_labs-0.7.0/src/semantic_link_labs.egg-info/SOURCES.txt +141 -0
  13. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/semantic_link_labs.egg-info/requires.txt +2 -1
  14. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/__init__.py +45 -15
  15. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/_ai.py +42 -85
  16. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
  17. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
  18. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
  19. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
  20. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
  21. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
  22. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
  23. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
  24. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
  25. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
  26. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
  27. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
  28. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
  29. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
  30. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
  31. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
  32. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
  33. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
  34. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
  35. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
  36. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
  37. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
  38. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
  39. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
  40. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
  41. semantic_link_labs-0.7.0/src/sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
  42. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/_clear_cache.py +12 -8
  43. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/_connections.py +77 -70
  44. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/_dax.py +7 -9
  45. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/_generate_semantic_model.py +75 -90
  46. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/_helper_functions.py +371 -20
  47. semantic_link_labs-0.7.0/src/sempy_labs/_icons.py +33 -0
  48. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/_list_functions.py +855 -427
  49. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/_model_auto_build.py +4 -3
  50. semantic_link_labs-0.7.0/src/sempy_labs/_model_bpa.py +539 -0
  51. semantic_link_labs-0.7.0/src/sempy_labs/_model_bpa_bulk.py +363 -0
  52. semantic_link_labs-0.7.0/src/sempy_labs/_model_bpa_rules.py +831 -0
  53. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/_model_dependencies.py +20 -16
  54. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/_one_lake_integration.py +18 -12
  55. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/_query_scale_out.py +116 -129
  56. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/_refresh_semantic_model.py +23 -10
  57. semantic_link_labs-0.7.0/src/sempy_labs/_translations.py +457 -0
  58. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/_vertipaq.py +152 -123
  59. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/directlake/__init__.py +7 -1
  60. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/directlake/_directlake_schema_compare.py +33 -30
  61. semantic_link_labs-0.7.0/src/sempy_labs/directlake/_directlake_schema_sync.py +106 -0
  62. semantic_link_labs-0.7.0/src/sempy_labs/directlake/_dl_helper.py +233 -0
  63. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/directlake/_get_directlake_lakehouse.py +7 -8
  64. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/directlake/_get_shared_expression.py +5 -3
  65. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/directlake/_guardrails.py +20 -16
  66. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
  67. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
  68. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
  69. semantic_link_labs-0.7.0/src/sempy_labs/directlake/_update_directlake_partition_entity.py +221 -0
  70. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/directlake/_warm_cache.py +7 -4
  71. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
  72. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/lakehouse/_get_lakehouse_tables.py +65 -71
  73. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/lakehouse/_lakehouse.py +5 -3
  74. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/lakehouse/_shortcuts.py +20 -13
  75. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/migration/__init__.py +1 -1
  76. semantic_link_labs-0.7.0/src/sempy_labs/migration/_create_pqt_file.py +228 -0
  77. semantic_link_labs-0.7.0/src/sempy_labs/migration/_migrate_calctables_to_lakehouse.py +400 -0
  78. semantic_link_labs-0.7.0/src/sempy_labs/migration/_migrate_calctables_to_semantic_model.py +151 -0
  79. semantic_link_labs-0.7.0/src/sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +538 -0
  80. semantic_link_labs-0.7.0/src/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +159 -0
  81. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/migration/_migration_validation.py +2 -2
  82. semantic_link_labs-0.7.0/src/sempy_labs/migration/_refresh_calc_tables.py +123 -0
  83. semantic_link_labs-0.7.0/src/sempy_labs/report/_BPAReportTemplate.json +232 -0
  84. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/report/__init__.py +6 -2
  85. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  86. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/.platform +11 -0
  87. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  88. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  89. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  90. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  91. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  92. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  93. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  94. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  95. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  96. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  97. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  98. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  99. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  100. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  101. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  102. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  103. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  104. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  105. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  106. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  107. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  108. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  109. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  110. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  111. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  112. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  113. semantic_link_labs-0.7.0/src/sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  114. semantic_link_labs-0.7.0/src/sempy_labs/report/_generate_report.py +369 -0
  115. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/report/_report_functions.py +90 -59
  116. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/report/_report_rebind.py +40 -34
  117. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/tom/__init__.py +1 -4
  118. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/tom/_model.py +601 -181
  119. semantic_link_labs-0.5.0/PKG-INFO +0 -21
  120. semantic_link_labs-0.5.0/notebooks/Migration to Direct Lake.ipynb +0 -1
  121. semantic_link_labs-0.5.0/notebooks/Model Optimization.ipynb +0 -1
  122. semantic_link_labs-0.5.0/notebooks/Tabular Object Model.ipynb +0 -1
  123. semantic_link_labs-0.5.0/src/semantic_link_labs.egg-info/PKG-INFO +0 -21
  124. semantic_link_labs-0.5.0/src/semantic_link_labs.egg-info/SOURCES.txt +0 -81
  125. semantic_link_labs-0.5.0/src/sempy_labs/_icons.py +0 -10
  126. semantic_link_labs-0.5.0/src/sempy_labs/_model_bpa.py +0 -1350
  127. semantic_link_labs-0.5.0/src/sempy_labs/_translations.py +0 -378
  128. semantic_link_labs-0.5.0/src/sempy_labs/directlake/_directlake_schema_sync.py +0 -123
  129. semantic_link_labs-0.5.0/src/sempy_labs/directlake/_fallback.py +0 -58
  130. semantic_link_labs-0.5.0/src/sempy_labs/directlake/_update_directlake_partition_entity.py +0 -74
  131. semantic_link_labs-0.5.0/src/sempy_labs/migration/_create_pqt_file.py +0 -230
  132. semantic_link_labs-0.5.0/src/sempy_labs/migration/_migrate_calctables_to_lakehouse.py +0 -429
  133. semantic_link_labs-0.5.0/src/sempy_labs/migration/_migrate_calctables_to_semantic_model.py +0 -150
  134. semantic_link_labs-0.5.0/src/sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +0 -519
  135. semantic_link_labs-0.5.0/src/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +0 -165
  136. semantic_link_labs-0.5.0/src/sempy_labs/migration/_refresh_calc_tables.py +0 -129
  137. semantic_link_labs-0.5.0/src/sempy_labs/report/_generate_report.py +0 -248
  138. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
  139. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
  140. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/.github/ISSUE_TEMPLATE/issue--question---advice-needed.md +0 -0
  141. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/.github/workflows/build.yaml +0 -0
  142. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/.github/workflows/codeql.yaml +0 -0
  143. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/.gitignore +0 -0
  144. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/.readthedocs.yaml +0 -0
  145. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/.vscode/settings.json +0 -0
  146. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/CODE_OF_CONDUCT.md +0 -0
  147. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/LICENSE +0 -0
  148. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/SECURITY.md +0 -0
  149. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/SUPPORT.md +0 -0
  150. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/docs/Makefile +0 -0
  151. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/docs/make.bat +0 -0
  152. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/docs/requirements.txt +0 -0
  153. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/docs/source/index.rst +0 -0
  154. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/docs/source/modules.rst +0 -0
  155. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/notebooks/Query Scale Out.ipynb +0 -0
  156. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/setup.cfg +0 -0
  157. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/semantic_link_labs.egg-info/dependency_links.txt +0 -0
  158. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/semantic_link_labs.egg-info/top_level.txt +0 -0
  159. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/src/sempy_labs/lakehouse/__init__.py +0 -0
  160. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/tests/__init__.py +0 -0
  161. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/tests/test_shortcuts.py +0 -0
  162. {semantic_link_labs-0.5.0 → semantic_link_labs-0.7.0}/tests/test_tom.py +0 -0
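For reference, the upgrade path between the two sdists compared here is an ordinary pip install pinned to the newer version; a minimal sketch for a Fabric notebook (the version pin is the only detail taken from this page's title):

```python
# Pin to the newer of the two versions compared in this diff.
%pip install semantic-link-labs==0.7.0
```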
@@ -0,0 +1,148 @@
+ Metadata-Version: 2.1
+ Name: semantic-link-labs
+ Version: 0.7.0
+ Summary: Semantic Link Labs for Microsoft Fabric
+ Author: Microsoft Corporation
+ License: MIT License
+ Project-URL: Repository, https://github.com/microsoft/semantic-link-labs.git
+ Classifier: Development Status :: 3 - Alpha
+ Classifier: Intended Audience :: Developers
+ Classifier: Intended Audience :: Education
+ Classifier: Intended Audience :: Science/Research
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3 :: Only
+ Classifier: Framework :: Jupyter
+ Requires-Python: <3.12,>=3.10
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: semantic-link-sempy>=0.7.7
+ Requires-Dist: anytree
+ Requires-Dist: powerbiclient
+ Requires-Dist: polib
+ Provides-Extra: test
+ Requires-Dist: pytest>=8.2.1; extra == "test"
+
+ # Semantic Link Labs
+
+ [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.7.0&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+ [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
+ [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)
+
+ ---
+ [Read the documentation on ReadTheDocs!](https://semantic-link-labs.readthedocs.io/en/stable/)
+ ---
+
+ This is a Python library intended to be used in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library was originally intended to solely contain functions used for [migrating semantic models to Direct Lake mode](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration). However, it quickly became apparent that functions within such a library could support many other useful activities in the realm of semantic models, reports, lakehouses and really anything Fabric-related. As such, this library contains a variety of functions ranging from running [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_vertipaq_analyzer) or the [Best Practice Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa) against a semantic model to seeing if any [lakehouse tables hit Direct Lake guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables) or accessing the [Tabular Object Model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html) and more!
+
+ Instructions for migrating import/DirectQuery semantic models to Direct Lake mode can be found [here](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration).
+
+ If you encounter any issues, please [raise a bug](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=bug_report.md&title=).
+
+ If you have ideas for new features/functions, please [request a feature](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=feature_request.md&title=).
+
+ ## Install the library in a Fabric notebook
+ ```python
+ %pip install semantic-link-labs
+ ```
+
+ ## Once installed, run this code to import the library into your notebook
+ ```python
+ import sempy_labs as labs
+ from sempy_labs import migration, directlake
+ from sempy_labs import lakehouse as lake
+ from sempy_labs import report as rep
+ from sempy_labs.tom import connect_semantic_model
+ ```
+
+ ## Load semantic-link-labs into a custom [Fabric environment](https://learn.microsoft.com/fabric/data-engineering/create-and-use-environment)
+ An even better way to ensure the semantic-link-labs library is available in your workspace/notebooks is to load it as a library in a custom Fabric environment. If you do this, you will not have to run the above '%pip install' code every time in your notebook. Please follow the steps below.
+
+ #### Create a custom environment
+ 1. Navigate to your Fabric workspace
+ 2. Click 'New' -> More options
+ 3. Within 'Data Science', click 'Environment'
+ 4. Name your environment, click 'Create'
+
+ #### Add semantic-link-labs as a library to the environment
+ 1. Within 'Public libraries', click 'Add from PyPI'
+ 2. Enter 'semantic-link-labs'
+ 3. Click 'Save' at the top right of the screen
+ 4. Click 'Publish' at the top right of the screen
+ 5. Click 'Publish All'
+
+ #### Update your notebook to use the new environment (*must wait for the environment to finish publishing*)
+ 1. Navigate to your notebook
+ 2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
+
+ ---
+ ## Direct Lake migration
+
+ The following process automates the migration of an import/DirectQuery model to a new [Direct Lake](https://learn.microsoft.com/power-bi/enterprise/directlake-overview) model. The first step is specifically applicable to models which use Power Query to perform data transformations. If your model does not use Power Query, you must migrate the base tables used in your semantic model to a Fabric lakehouse.
+
+ Check out [Nikola Ilic](https://twitter.com/DataMozart)'s terrific [blog post](https://data-mozart.com/migrate-existing-power-bi-semantic-models-to-direct-lake-a-step-by-step-guide/) on this topic!
+
+ Check out my [blog post](https://www.elegantbi.com/post/direct-lake-migration) on this topic!
+
+ [![Direct Lake Migration Video](https://img.youtube.com/vi/gGIxMrTVyyI/0.jpg)](https://www.youtube.com/watch?v=gGIxMrTVyyI&t=495)
+
+ ### Prerequisites
+
+ * Make sure you [enable XMLA Read/Write](https://learn.microsoft.com/power-bi/enterprise/service-premium-connect-tools#enable-xmla-read-write) for your capacity
+ * Make sure you have a [lakehouse](https://learn.microsoft.com/fabric/onelake/create-lakehouse-onelake#create-a-lakehouse) in a Fabric workspace
+ * Enable the following [setting](https://learn.microsoft.com/power-bi/transform-model/service-edit-data-models#enable-the-preview-feature): Workspace -> Workspace Settings -> General -> Data model settings -> Users can edit data models in the Power BI service
+
+ ### Instructions
+
+ 1. Download this [notebook](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Migration%20to%20Direct%20Lake.ipynb).
+ 2. Make sure you are in the ['Data Engineering' persona](https://learn.microsoft.com/fabric/get-started/microsoft-fabric-overview#components-of-microsoft-fabric). Click the icon at the bottom left corner of your Workspace screen and select 'Data Engineering'.
+ 3. In your workspace, select 'New -> Import notebook' and import the notebook from step 1.
+ 4. [Add your lakehouse](https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse) to your Fabric notebook.
+ 5. Follow the instructions within the notebook.
+
+ ### The migration process
+
+ > [!NOTE]
+ > The first 4 steps are only necessary if you have logic in Power Query. Otherwise, you will need to migrate your semantic model source tables to lakehouse tables.
+
+ 1. The first step of the notebook creates a Power Query Template (.pqt) file which eases the migration of Power Query logic to Dataflows Gen2.
+ 2. After the .pqt file is created, sync files from your [OneLake file explorer](https://www.microsoft.com/download/details.aspx?id=105222).
+ 3. Navigate to your lakehouse (this is critical!). From your lakehouse, create a new Dataflows Gen2, and import the Power Query Template file. Doing this step from your lakehouse will automatically set the destination for all tables to this lakehouse (instead of having to manually map each one).
+ 4. Publish the Dataflow Gen2 and wait for it to finish creating the delta lake tables in your lakehouse.
+ 5. Back in the notebook, the next step will create your new Direct Lake semantic model with the name of your choice, taking all the relevant properties from the original semantic model and refreshing/framing your new semantic model.
+
+ > [!NOTE]
+ > As of version 0.2.1, calculated tables are also migrated to Direct Lake (as data tables with their DAX expression stored as model annotations in the new semantic model). Additionally, Field Parameters are migrated as they were in the original semantic model (as a calculated table).
+
+ 6. Finally, you can easily rebind all reports which use the import/DQ semantic model to the new Direct Lake semantic model in one click.
+
+ ### Completing these steps will do the following:
+ * Offload your Power Query logic to Dataflows Gen2 inside of Fabric (where it can be maintained and development can continue).
+ * Dataflows Gen2 will create delta tables in your Fabric lakehouse. These tables can then be used for your Direct Lake model.
+ * Create a new semantic model in Direct Lake mode containing all the standard tables and columns, calculation groups, measures, relationships, hierarchies, roles, row level security, perspectives, and translations from your original semantic model.
+ * Viable calculated tables are migrated to the new semantic model as data tables. Delta tables are dynamically generated in the lakehouse to support the Direct Lake model. The calculated table DAX logic is stored as model annotations in the new semantic model.
+ * Field parameters are migrated to the new semantic model as they were in the original semantic model (as calculated tables). Any calculated columns used in field parameters are automatically removed in the new semantic model's field parameter(s).
+ * Non-supported objects are not transferred (e.g. calculated columns, relationships using columns with unsupported data types, etc.).
+ * Reports used by your original semantic model will be rebound to your new semantic model.
+
+ ## Contributing
+
+ This project welcomes contributions and suggestions. Most contributions require you to agree to a
+ Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
+ the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
+
+ When you submit a pull request, a CLA bot will automatically determine whether you need to provide
+ a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
+ provided by the bot. You will only need to do this once across all repos using our CLA.
+
+ This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
+ For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
+ contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
+
+ ## Trademarks
+
+ This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft
+ trademarks or logos is subject to and must follow
+ [Microsoft's Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/usage/general).
+ Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship.
+ Any use of third-party trademarks or logos is subject to those third parties' policies.
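The 'one click' rebind in step 6 above maps to the report_rebind_all helper used in the bundled 'Migration to Direct Lake' notebook further down in this diff; a condensed sketch with placeholder model names (keyword arguments copied from that notebook):

```python
import sempy_labs.report as rep

# Placeholder names; keyword arguments (including the 'new_dataset_workpace'
# spelling) are copied verbatim from the bundled migration notebook.
rep.report_rebind_all(
    dataset='Sales Import Model',     # old import/DQ semantic model
    dataset_workspace=None,           # None -> the workspace the notebook runs in
    new_dataset='Sales Direct Lake',  # new Direct Lake semantic model
    new_dataset_workpace=None,
    report_workspace=None,
)
```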
@@ -1,13 +1,15 @@
- # [semantic-link-labs](https://semantic-link-labs.readthedocs.io/en/0.5.0/)
+ # Semantic Link Labs
 
  [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
- [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.5.0&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.7.0&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
  [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)
 
- All functions in this library are documented [here](https://semantic-link-labs.readthedocs.io/en/0.5.0/)!
+ ---
+ [Read the documentation on ReadTheDocs!](https://semantic-link-labs.readthedocs.io/en/stable/)
+ ---
 
- This is a python library intended to be used in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library was originally intended to contain functions used for [migrating semantic models to Direct Lake mode](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration). However, it quickly became apparent that functions within such a library could support many other useful activities in the realm of semantic models, reports, lakehouses and really anything Fabric-related. As such, this library contains a variety of functions ranging from running [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/0.5.0/sempy_labs.html#sempy_labs.import_vertipaq_analyzer) or the [Best Practice Analyzer](https://semantic-link-labs.readthedocs.io/en/0.5.0/sempy_labs.html#sempy_labs.run_model_bpa) against a semantic model to seeing if any [lakehouse tables hit Direct Lake guardrails](https://semantic-link-labs.readthedocs.io/en/0.5.0/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables) or accessing the [Tabular Object Model](https://semantic-link-labs.readthedocs.io/en/0.5.0/sempy_labs.tom.html) and more!
+ This is a Python library intended to be used in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library was originally intended to solely contain functions used for [migrating semantic models to Direct Lake mode](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration). However, it quickly became apparent that functions within such a library could support many other useful activities in the realm of semantic models, reports, lakehouses and really anything Fabric-related. As such, this library contains a variety of functions ranging from running [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_vertipaq_analyzer) or the [Best Practice Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa) against a semantic model to seeing if any [lakehouse tables hit Direct Lake guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables) or accessing the [Tabular Object Model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html) and more!
 
  Instructions for migrating import/DirectQuery semantic models to Direct Lake mode can be found [here](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration).
 
@@ -15,8 +17,6 @@ If you encounter any issues, please [raise a bug](https://github.com/microsoft/s
 
  If you have ideas for new features/functions, please [request a feature](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=feature_request.md&title=).
 
- ## [Function documentation](https://semantic-link-labs.readthedocs.io/en/0.5.0/)
-
  ## Install the library in a Fabric notebook
  ```python
  %pip install semantic-link-labs
@@ -25,8 +25,9 @@ If you have ideas for new features/functions, please [request a feature](https:/
  ## Once installed, run this code to import the library into your notebook
  ```python
  import sempy_labs as labs
- from sempy_labs import migration, report, directlake
+ from sempy_labs import migration, directlake
  from sempy_labs import lakehouse as lake
+ from sempy_labs import report as rep
  from sempy_labs.tom import connect_semantic_model
  ```
 
@@ -59,6 +60,8 @@ Check out [Nikola Ilic](https://twitter.com/DataMozart)'s terrific [blog post](h
 
  Check out my [blog post](https://www.elegantbi.com/post/direct-lake-migration) on this topic!
 
+ [![Direct Lake Migration Video](https://img.youtube.com/vi/gGIxMrTVyyI/0.jpg)](https://www.youtube.com/watch?v=gGIxMrTVyyI&t=495)
+
  ### Prerequisites
 
  * Make sure you [enable XMLA Read/Write](https://learn.microsoft.com/power-bi/enterprise/service-premium-connect-tools#enable-xmla-read-write) for your capacity
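The import hunk above is the one behavioral change for existing notebooks: report helpers move from a bare `report` import to a `rep` alias. A minimal sketch of the adjusted call style, with placeholder names:

```python
# Previously: from sempy_labs import migration, report, directlake
from sempy_labs import report as rep

# Placeholder names; report helpers are now reached through the 'rep' alias.
rep.report_rebind(
    report='My Report',
    dataset='My Direct Lake Model',
    report_workspace=None,   # None -> the workspace the notebook runs in
    dataset_workspace=None,
)
```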
@@ -13,7 +13,7 @@ sys.path.insert(0, os.path.abspath('../../src/'))
  project = 'semantic-link-labs'
  copyright = '2024, Microsoft and community'
  author = 'Microsoft and community'
- release = '0.5.0'
+ release = '0.7.0'
 
  # -- General configuration ---------------------------------------------------
  # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
@@ -9,4 +9,5 @@ dependencies:
  - semantic-link-sempy>=0.7.5
  - azure-identity==1.7.1
  - azure-storage-blob>=12.9.0
- - pandas-stubs
+ - pandas-stubs
+ - types-tqdm
@@ -0,0 +1 @@
+ {"cells":[{"cell_type":"markdown","id":"5c27dfd1-4fe0-4a97-92e6-ddf78889aa93","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Install the latest .whl package\n","\n","Check [here](https://pypi.org/project/semantic-link-labs/) to see the latest version."]},{"cell_type":"code","execution_count":null,"id":"d5cae9db-cef9-48a8-a351-9c5fcc99645c","metadata":{"jupyter":{"outputs_hidden":true,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["%pip install semantic-link-labs"]},{"cell_type":"markdown","id":"b195eae8","metadata":{},"source":["### Import the library and necessary packages"]},{"cell_type":"code","execution_count":null,"id":"1344e286","metadata":{},"outputs":[],"source":["import sempy_labs as labs\n","import sempy_labs.report as rep"]},{"cell_type":"markdown","id":"5a3fe6e8-b8aa-4447-812b-7931831e07fe","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Collect semantic model Best Practice Analyzer stats"]},{"cell_type":"markdown","id":"8702e95b","metadata":{},"source":["#### Collect stats for all semantic models within a single workspace"]},{"cell_type":"code","execution_count":null,"id":"9e349954","metadata":{},"outputs":[],"source":["labs.run_model_bpa_bulk(workspace='Workspace 1')"]},{"cell_type":"markdown","id":"8281d30d","metadata":{},"source":["#### Collect stats for all semantic models within a list of workspaces"]},{"cell_type":"code","execution_count":null,"id":"d6b09b86","metadata":{},"outputs":[],"source":["labs.run_model_bpa_bulk(workspace=['Workspace 1', 'Workspace 2'])"]},{"cell_type":"markdown","id":"ec9109e4","metadata":{},"source":["#### Collect stats for all semantic models within all accessible workspaces"]},{"cell_type":"code","execution_count":null,"id":"e08860da","metadata":{},"outputs":[],"source":["labs.run_model_bpa_bulk(workspace=None)"]},{"cell_type":"markdown","id":"113b04a7","metadata":{},"source":["#### Create a Direct Lake semantic model (called 'ModelBPA') for analyzing the Best Practice Analyzer results"]},{"cell_type":"code","execution_count":null,"id":"b4e1296b","metadata":{},"outputs":[],"source":["labs.create_model_bpa_semantic_model()"]},{"cell_type":"markdown","id":"7f94b13a","metadata":{},"source":["#### Create a Power BI report called 'ModelBPA' based on the semantic model created in the previous cell, which can be used to analyze the Best Practice Analyzer results"]},{"cell_type":"code","execution_count":null,"id":"17565d35","metadata":{},"outputs":[],"source":["rep.create_model_bpa_report()"]},{"cell_type":"markdown","id":"d41bdae4","metadata":{},"source":["<div class=\"alert alert-block alert-info\">\n","<b>Note:</b> The 'BPAReport' Power BI report is located within the workspace in which the default lakehouse attached to this notebook resides. Navigate to this workspace to open the report and view the Best Practice Analyzer results.\n","</div>\n","\n","Going forward, you just need to run the 'run_model_bpa_bulk' function which will append BPA results to the 'modelbparesults' delta table in your lakehouse. Since the 'BPAModel' semantic model is in Direct Lake mode, the data will appear in the semantic model and report automatically without any need for processing the semantic model.\n","\n"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Synapse PySpark","language":"Python","name":"synapse_pyspark"},"language_info":{"name":"python"},"microsoft":{"language":"python"},"nteract":{"version":"nteract-front-end@1.0.0"},"spark_compute":{"compute_id":"/trident/default"},"synapse_widget":{"state":{},"version":"0.1"},"widgets":{}},"nbformat":4,"nbformat_minor":5}
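Read end to end, the notebook above reduces to a three-call pipeline; a condensed sketch, assuming a default lakehouse is already attached to the notebook (function names are taken directly from the notebook cells):

```python
import sempy_labs as labs
import sempy_labs.report as rep

# 1) Append BPA results for every accessible workspace to the
#    'modelbparesults' delta table in the attached lakehouse.
labs.run_model_bpa_bulk(workspace=None)

# 2) Build the Direct Lake 'ModelBPA' semantic model over that table.
labs.create_model_bpa_semantic_model()

# 3) Build the companion Power BI report for analyzing the results.
rep.create_model_bpa_report()
```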
@@ -0,0 +1 @@
+ {"cells":[{"cell_type":"markdown","id":"5c27dfd1-4fe0-4a97-92e6-ddf78889aa93","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Install the latest .whl package\n","\n","Check [here](https://pypi.org/project/semantic-link-labs/) to see the latest version."]},{"cell_type":"code","execution_count":null,"id":"d5cae9db-cef9-48a8-a351-9c5fcc99645c","metadata":{"jupyter":{"outputs_hidden":true,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["%pip install semantic-link-labs"]},{"cell_type":"markdown","id":"969a29bf","metadata":{},"source":["### Import the library and set initial parameters"]},{"cell_type":"code","execution_count":null,"id":"29c923f8","metadata":{},"outputs":[],"source":["import sempy_labs as labs\n","from sempy_labs import migration, directlake\n","import sempy_labs.report as rep\n","\n","dataset_name = '' #Enter the import/DQ semantic model name\n","workspace_name = None #Enter the workspace of the import/DQ semantic model. If set to None it will use the current workspace.\n","new_dataset_name = '' #Enter the new Direct Lake semantic model name\n","new_dataset_workspace_name = None #Enter the workspace where the Direct Lake model will be created. If set to None it will use the current workspace.\n","lakehouse_name = None #Enter the lakehouse to be used for the Direct Lake model. If set to None it will use the lakehouse attached to the notebook.\n","lakehouse_workspace_name = None #Enter the lakehouse workspace. If set to None it will use the new_dataset_workspace_name."]},{"cell_type":"markdown","id":"5a3fe6e8-b8aa-4447-812b-7931831e07fe","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Create the [Power Query Template](https://learn.microsoft.com/power-query/power-query-template) file\n","\n","This encapsulates all of the semantic model's Power Query logic into a single file."]},{"cell_type":"code","execution_count":null,"id":"cde43b47-4ecc-46ae-9125-9674819c7eab","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["migration.create_pqt_file(dataset = dataset_name, workspace = workspace_name)"]},{"cell_type":"markdown","id":"bf945d07-544c-4934-b7a6-cfdb90ca725e","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Import the Power Query Template to Dataflows Gen2\n","\n","- Open the [OneLake file explorer](https://www.microsoft.com/download/details.aspx?id=105222) and sync your files (right click -> Sync from OneLake)\n","\n","- Navigate to your lakehouse. From this window, create a new Dataflows Gen2 and import the Power Query Template file from OneLake (OneLake -> Workspace -> Lakehouse -> Files...), and publish the Dataflows Gen2.\n","\n","<div class=\"alert alert-block alert-info\">\n","<b>Important!</b> Make sure to create the Dataflows Gen2 from within the lakehouse window. That will ensure that all the tables automatically map to that lakehouse as the destination. Otherwise, you will have to manually map each table to its destination individually.\n","</div>"]},{"cell_type":"markdown","id":"9975db7d","metadata":{},"source":["### Create the Direct Lake model based on the import/DQ semantic model\n","\n","Calculated columns are not migrated to the Direct Lake model as they are not supported in Direct Lake mode."]},{"cell_type":"code","execution_count":null,"id":"0a3616b5-566e-414e-a225-fb850d6418dc","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["import time\n","labs.create_blank_semantic_model(dataset = new_dataset_name, workspace = new_dataset_workspace_name)\n","\n","migration.migrate_calc_tables_to_lakehouse(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name,\n"," workspace = workspace_name,\n"," new_dataset_workspace = new_dataset_workspace_name,\n"," lakehouse = lakehouse_name,\n"," lakehouse_workspace = lakehouse_workspace_name\n",")\n","migration.migrate_tables_columns_to_semantic_model(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name,\n"," workspace = workspace_name,\n"," new_dataset_workspace = new_dataset_workspace_name,\n"," lakehouse = lakehouse_name,\n"," lakehouse_workspace = lakehouse_workspace_name\n",")\n","migration.migrate_calc_tables_to_semantic_model(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name,\n"," workspace = workspace_name,\n"," new_dataset_workspace = new_dataset_workspace_name,\n"," lakehouse = lakehouse_name,\n"," lakehouse_workspace = lakehouse_workspace_name\n",")\n","migration.migrate_model_objects_to_semantic_model(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name,\n"," workspace = workspace_name,\n"," new_dataset_workspace = new_dataset_workspace_name\n",")\n","migration.migrate_field_parameters(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name,\n"," workspace = workspace_name,\n"," new_dataset_workspace = new_dataset_workspace_name\n",")\n","time.sleep(2)\n","labs.refresh_semantic_model(dataset = new_dataset_name, workspace = new_dataset_workspace_name)\n","migration.refresh_calc_tables(dataset = new_dataset_name, workspace = new_dataset_workspace_name)\n","labs.refresh_semantic_model(dataset = new_dataset_name, workspace = new_dataset_workspace_name)"]},{"cell_type":"markdown","id":"bb98bb13","metadata":{},"source":["### Show migrated/unmigrated objects"]},{"cell_type":"code","execution_count":null,"id":"5db2f22c","metadata":{},"outputs":[],"source":["migration.migration_validation(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name, \n"," workspace = workspace_name, \n"," new_dataset_workspace = new_dataset_workspace_name\n",")"]},{"cell_type":"markdown","id":"fa244e9d-87c2-4a66-a7e0-be539a0ac7de","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Rebind all reports using the old semantic model to the new Direct Lake semantic model"]},{"cell_type":"code","execution_count":null,"id":"d4e867cc","metadata":{},"outputs":[],"source":["rep.report_rebind_all(\n"," dataset = dataset_name,\n"," dataset_workspace = workspace_name,\n"," new_dataset = new_dataset_name,\n"," new_dataset_workpace = new_dataset_workspace_name,\n"," report_workspace = None\n",")"]},{"cell_type":"markdown","id":"3365d20d","metadata":{},"source":["### Rebind reports one-by-one (optional)"]},{"cell_type":"code","execution_count":null,"id":"056b7180-d7ac-492c-87e7-ac7d0e4bb929","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["report_name = '' # Enter report name which you want to rebind to the new Direct Lake model\n","\n","rep.report_rebind(\n"," report = report_name,\n"," dataset = new_dataset_name,\n"," report_workspace=workspace_name,\n"," dataset_workspace = new_dataset_workspace_name)"]},{"cell_type":"markdown","id":"526f2327","metadata":{},"source":["### Show unsupported objects"]},{"cell_type":"code","execution_count":null,"id":"a47376d7","metadata":{},"outputs":[],"source":["dfT, dfC, dfR = directlake.show_unsupported_direct_lake_objects(dataset = dataset_name, workspace = workspace_name)\n","\n","print('Calculated Tables are not supported...')\n","display(dfT)\n","print(\"Learn more about Direct Lake limitations here: https://learn.microsoft.com/power-bi/enterprise/directlake-overview#known-issues-and-limitations\")\n","print('Calculated columns are not supported. Columns of binary data type are not supported.')\n","display(dfC)\n","print('Columns used for relationship cannot be of data type datetime and they also must be of the same data type.')\n","display(dfR)"]},{"cell_type":"markdown","id":"ed08ba4c","metadata":{},"source":["### Schema check between semantic model tables/columns and lakehouse tables/columns\n","\n","This will list any tables/columns which are in the new semantic model but do not exist in the lakehouse"]},{"cell_type":"code","execution_count":null,"id":"03889ba4","metadata":{},"outputs":[],"source":["directlake.direct_lake_schema_compare(dataset = new_dataset_name, workspace = new_dataset_workspace_name)"]},{"cell_type":"markdown","id":"2229963b","metadata":{},"source":["### Show calculated tables which have been migrated to the Direct Lake semantic model as regular tables"]},{"cell_type":"code","execution_count":null,"id":"dd537d90","metadata":{},"outputs":[],"source":["directlake.list_direct_lake_model_calc_tables(dataset = new_dataset_name, workspace = new_dataset_workspace_name)"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Python 3","language":"python","name":"python3"},"language_info":{"name":"python","version":"3.12.3"},"microsoft":{"language":"python"},"nteract":{"version":"nteract-front-end@1.0.0"},"spark_compute":{"compute_id":"/trident/default"},"synapse_widget":{"state":{},"version":"0.1"},"widgets":{}},"nbformat":4,"nbformat_minor":5}
@@ -0,0 +1 @@
+ {"cells":[{"cell_type":"markdown","id":"5c27dfd1-4fe0-4a97-92e6-ddf78889aa93","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Install the latest .whl package\n","\n","Check [here](https://pypi.org/project/semantic-link-labs/) to see the latest version."]},{"cell_type":"code","execution_count":null,"id":"d5cae9db-cef9-48a8-a351-9c5fcc99645c","metadata":{"jupyter":{"outputs_hidden":true,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["%pip install semantic-link-labs"]},{"cell_type":"markdown","id":"cd8de5a0","metadata":{},"source":["### Import the library"]},{"cell_type":"code","execution_count":null,"id":"5cc6eedf","metadata":{},"outputs":[],"source":["import sempy_labs as labs\n","from sempy_labs import lakehouse as lake\n","from sempy_labs import directlake\n","import sempy_labs.report as rep\n","\n","dataset_name = ''\n","workspace_name = None"]},{"cell_type":"markdown","id":"5a3fe6e8-b8aa-4447-812b-7931831e07fe","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Vertipaq Analyzer"]},{"cell_type":"code","execution_count":null,"id":"cde43b47-4ecc-46ae-9125-9674819c7eab","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["labs.vertipaq_analyzer(dataset = dataset_name, workspace = workspace_name)"]},{"cell_type":"markdown","id":"419a348f","metadata":{},"source":["Export the Vertipaq Analyzer results to a .zip file in your lakehouse"]},{"cell_type":"code","execution_count":null,"id":"8aa239b3","metadata":{},"outputs":[],"source":["labs.vertipaq_analyzer(dataset = dataset_name, workspace = workspace_name, export = 'zip')"]},{"cell_type":"markdown","id":"2dce0f4f","metadata":{},"source":["Export the Vertipaq Analyzer results to append to delta tables in your lakehouse."]},{"cell_type":"code","execution_count":null,"id":"aef93fc8","metadata":{},"outputs":[],"source":["labs.vertipaq_analyzer(dataset = dataset_name, workspace = workspace_name, export = 'table')"]},{"cell_type":"markdown","id":"1c62a802","metadata":{},"source":["Visualize the contents of an exported Vertipaq Analyzer .zip file."]},{"cell_type":"code","execution_count":null,"id":"9e349954","metadata":{},"outputs":[],"source":["labs.import_vertipaq_analyzer(folder_path = '', file_name = '')"]},{"cell_type":"markdown","id":"456ce0ff","metadata":{},"source":["### Best Practice Analyzer\n","\n","This runs the [standard rules](https://github.com/microsoft/Analysis-Services/tree/master/BestPracticeRules) for semantic models posted on Microsoft's GitHub."]},{"cell_type":"code","execution_count":null,"id":"0a3616b5-566e-414e-a225-fb850d6418dc","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["labs.run_model_bpa(dataset = dataset_name, workspace = workspace_name)"]},{"cell_type":"markdown","id":"6fb32a58","metadata":{},"source":["This runs the Best Practice Analyzer and exports the results to the 'modelbparesults' delta table in your Fabric lakehouse."]},{"cell_type":"code","execution_count":null,"id":"677851c3","metadata":{},"outputs":[],"source":["labs.run_model_bpa(dataset = dataset_name, workspace = workspace_name, export = True)"]},{"cell_type":"markdown","id":"64968a31","metadata":{},"source":["This runs the Best Practice Analyzer with the rules translated into Italian."]},{"cell_type":"code","execution_count":null,"id":"3c7d89e2","metadata":{},"outputs":[],"source":["labs.run_model_bpa(dataset = dataset_name, workspace = workspace_name, language = 'it-IT')"]},{"cell_type":"markdown","id":"255c30bb","metadata":{},"source":["<div class=\"alert alert-block alert-info\">\n","<b>Note:</b> For analyzing model BPA results at scale, see the Best Practice Analyzer Report notebook (link below).\n","</div>\n","\n","[Best Practice Analyzer Notebook](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Best%20Practice%20Analyzer%20Report.ipynb)"]},{"cell_type":"markdown","id":"8126a1a1","metadata":{},"source":["### Direct Lake\n","\n","Check if any lakehouse tables will hit the [Direct Lake guardrails](https://learn.microsoft.com/power-bi/enterprise/directlake-overview#fallback)."]},{"cell_type":"code","execution_count":null,"id":"e7397b15","metadata":{},"outputs":[],"source":["lake.get_lakehouse_tables(lakehouse = None, workspace = None, extended = True, count_rows = False)"]},{"cell_type":"code","execution_count":null,"id":"b30074cf","metadata":{},"outputs":[],"source":["lake.get_lakehouse_tables(lakehouse = None, workspace = None, extended = True, count_rows = False, export = True)"]},{"cell_type":"markdown","id":"99b84f2b","metadata":{},"source":["Check if any tables in a Direct Lake semantic model will fall back to DirectQuery."]},{"cell_type":"code","execution_count":null,"id":"f837be58","metadata":{},"outputs":[],"source":["directlake.check_fallback_reason(dataset = dataset_name, workspace = workspace_name)"]},{"cell_type":"markdown","id":"8f6df93e","metadata":{},"source":["### [OPTIMIZE](https://docs.delta.io/latest/optimizations-oss.html) your lakehouse delta tables."]},{"cell_type":"code","execution_count":null,"id":"e0262c9e","metadata":{},"outputs":[],"source":["lake.optimize_lakehouse_tables(tables = ['', ''], lakehouse = None, workspace = None)"]},{"cell_type":"markdown","id":"0091d6a0","metadata":{},"source":["Refresh/reframe your Direct Lake semantic model and restore the columns which were in memory prior to the refresh."]},{"cell_type":"code","execution_count":null,"id":"77eef082","metadata":{},"outputs":[],"source":["directlake.warm_direct_lake_cache_isresident(dataset = dataset_name, workspace = workspace_name)"]},{"cell_type":"markdown","id":"dae1a210","metadata":{},"source":["Ensure a warm cache for your users by putting the columns of a Direct Lake semantic model into memory based on the contents of a [perspective](https://learn.microsoft.com/analysis-services/tabular-models/perspectives-ssas-tabular?view=asallproducts-allversions).\n","\n","Perspectives can be created either in [Tabular Editor 3](https://github.com/TabularEditor/TabularEditor3/releases/latest) or in [Tabular Editor 2](https://github.com/TabularEditor/TabularEditor/releases/latest) using the [Perspective Editor](https://www.elegantbi.com/post/perspectiveeditor)."]},{"cell_type":"code","execution_count":null,"id":"43297001","metadata":{},"outputs":[],"source":["directlake.warm_direct_lake_cache_perspective(dataset = dataset_name, workspace = workspace_name, perspective = '', add_dependencies = True)"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Synapse PySpark","language":"Python","name":"synapse_pyspark"},"language_info":{"name":"python"},"microsoft":{"language":"python"},"nteract":{"version":"nteract-front-end@1.0.0"},"spark_compute":{"compute_id":"/trident/default"},"synapse_widget":{"state":{},"version":"0.1"},"widgets":{}},"nbformat":4,"nbformat_minor":5}
@@ -0,0 +1 @@
1
+ {"cells":[{"cell_type":"markdown","id":"5c27dfd1-4fe0-4a97-92e6-ddf78889aa93","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Install the latest .whl package\n","\n","Check [here](https://pypi.org/project/semantic-link-labs/) to see the latest version."]},{"cell_type":"code","execution_count":null,"id":"d5cae9db-cef9-48a8-a351-9c5fcc99645c","metadata":{"jupyter":{"outputs_hidden":true,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["%pip install semantic-link-labs"]},{"cell_type":"markdown","id":"b195eae8","metadata":{},"source":["### Import the library and set the initial parameters"]},{"cell_type":"code","execution_count":null,"id":"1344e286","metadata":{},"outputs":[],"source":["import sempy.fabric as fabric\n","import sempy_labs as labs\n","dataset = '' # Enter your dataset name\n","workspace = None # Enter your workspace name (if set to None it will use the workspace in which the notebook is running)"]},{"cell_type":"markdown","id":"5a3fe6e8-b8aa-4447-812b-7931831e07fe","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Refresh a semantic model"]},{"cell_type":"code","execution_count":null,"id":"9e349954","metadata":{},"outputs":[],"source":["labs.refresh_semantic_model(dataset=dataset, workspace=workspace)"]},{"cell_type":"markdown","id":"113b04a7","metadata":{},"source":["#### Refresh specific tables"]},{"cell_type":"code","execution_count":null,"id":"b4e1296b","metadata":{},"outputs":[],"source":["labs.refresh_semantic_model(dataset=dataset, workspace=workspace, tables=['Sales', 'Geography'])"]},{"cell_type":"markdown","id":"7f94b13a","metadata":{},"source":["#### Refresh specific partitions"]},{"cell_type":"code","execution_count":null,"id":"17565d35","metadata":{},"outputs":[],"source":["labs.refresh_semantic_model(dataset=dataset, workspace=workspace, parttions=[\"'Sales'[Sales FY20]\", \"'Sales'[Sales FY21]\"])"]},{"cell_type":"markdown","id":"aab5ca7c","metadata":{},"source":["#### Refresh a combination of tables and partitions"]},{"cell_type":"code","execution_count":null,"id":"e5818bd1","metadata":{},"outputs":[],"source":["labs.refresh_semantic_model(dataset=dataset, workspace=workspace, tables=['Geography', 'Calendar'], partitions=[\"'Sales'[Sales FY20]\", \"'Sales'[Sales FY21]\"])"]},{"cell_type":"markdown","id":"7f7074ea","metadata":{},"source":["#### Clear the values of a table"]},{"cell_type":"code","execution_count":null,"id":"3b1eb772","metadata":{},"outputs":[],"source":["labs.refresh_semantic_model(dataset=dataset, workspace=workspace, refresh_type='clearValues')"]},{"cell_type":"markdown","id":"29afede1","metadata":{},"source":["### View semantic model refreshes"]},{"cell_type":"code","execution_count":null,"id":"95c52cc0","metadata":{},"outputs":[],"source":["fabric.list_refresh_requests(dataset=dataset, workspace=workspace)"]},{"cell_type":"markdown","id":"fa7c525c","metadata":{},"source":["### Cancel a semantic model refresh"]},{"cell_type":"code","execution_count":null,"id":"5bb6f79f","metadata":{},"outputs":[],"source":["labs.cancel_dataset_refresh(dataset=dataset, workspace=workspace)"]},{"cell_type":"code","execution_count":null,"id":"acd34900","metadata":{},"outputs":[],"source":["# Specify the request_id based on a value from list_refresh_requests\n","labs.cancel_dataset_refresh(dataset=dataset, workspace=workspace, request_id='')"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Synapse 
PySpark","language":"Python","name":"synapse_pyspark"},"language_info":{"name":"python"},"microsoft":{"language":"python"},"nteract":{"version":"nteract-front-end@1.0.0"},"spark_compute":{"compute_id":"/trident/default"},"synapse_widget":{"state":{},"version":"0.1"},"widgets":{}},"nbformat":4,"nbformat_minor":5}
@@ -0,0 +1 @@
+ {"cells":[{"cell_type":"markdown","id":"5c27dfd1-4fe0-4a97-92e6-ddf78889aa93","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Install the latest .whl package\n","\n","Check [here](https://pypi.org/project/semantic-link-labs/) to see the latest version."]},{"cell_type":"code","execution_count":null,"id":"d5cae9db-cef9-48a8-a351-9c5fcc99645c","metadata":{"jupyter":{"outputs_hidden":true,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["%pip install semantic-link-labs"]},{"cell_type":"markdown","id":"5a3fe6e8-b8aa-4447-812b-7931831e07fe","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Connect to the [Tabular Object Model](https://learn.microsoft.com/analysis-services/tom/introduction-to-the-tabular-object-model-tom-in-analysis-services-amo?view=asallproducts-allversions) ([TOM](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.model?view=analysisservices-dotnet))\n","Setting the 'readonly' property to False enables read/write mode. This allows changes to be made to the semantic model."]},{"cell_type":"code","execution_count":null,"id":"cde43b47-4ecc-46ae-9125-9674819c7eab","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["import sempy_labs as labs\n","from sempy_labs.tom import connect_semantic_model\n","\n","dataset = '' # Enter dataset name\n","workspace = None # Enter workspace name\n","\n","with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," print(t.Name)"]},{"cell_type":"markdown","id":"fc6b277e","metadata":{},"source":["### Make changes to a semantic model using custom functions\n","Note that the custom functions have additional optional parameters (which may not be used in the examples below) for adding properties to model objects. 
Check the [documentation](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html) to see all available parameters for each function."]},{"cell_type":"markdown","id":"6d46d878","metadata":{},"source":["#### Rename objects in the semantic model"]},{"cell_type":"code","execution_count":null,"id":"1284825a","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," t.Name = t.Name.replace('_',' ')\n"]},{"cell_type":"code","execution_count":null,"id":"d3b60303","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," c.Name = c.Name.replace('_',' ')"]},{"cell_type":"markdown","id":"402a477c","metadata":{},"source":["#### Add measure(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"bdaaaa5c","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_measure(table_name ='Internet Sales', measure_name = 'Sales Amount', expression = \"SUM('Internet Sales'[SalesAmount])\")\n"," tom.add_measure(table_name ='Internet Sales', measure_name = 'Order Quantity', expression = \"SUM('Internet Sales'[OrderQty])\") "]},{"cell_type":"code","execution_count":null,"id":"a53a544b","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Internet Sales':\n"," tom.add_measure(table_name = t.Name, measure_name = 'Sales Amount', expression = \"SUM('Internet Sales'[SalesAmount])\")\n"," tom.add_measure(table_name = t.Name, measure_name = 'Order Quantity', expression = \"SUM('Internet Sales'[OrderQty])\")"]},{"cell_type":"markdown","id":"1cb1632f","metadata":{},"source":["#### Add column(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"81a22749","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_data_column(table_name ='Product', column_name = 'Size Range', source_column = 'SizeRange', data_type = 'Int64')\n"," tom.add_data_column(table_name = 'Segment', column_name = 'Summary Segment', source_column = 'SummarySegment', data_type = 'String')\n","\n"," tom.add_calculated_column(table_name = 'Internet Sales', column_name = 'GrossMargin', expression = \"'Internet Sales'[SalesAmount] - 'Internet Sales'[ProductCost]\", data_type = 'Decimal')"]},{"cell_type":"code","execution_count":null,"id":"053b6516","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.add_data_column(table_name = t.Name, column_name = 'Size Range', source_column = 'SizeRange', data_type = 'Int64')\n"," elif t.Name == 'Segment':\n"," tom.add_data_column(table_name = t.Name, column_name = 'Summary Segment', source_column = 'SummarySegment', data_type = 'String')\n"," elif t.Name == 'Internet Sales':\n"," tom.add_calculated_column(table_name = t.Name, column_name = 'GrossMargin', expression = \"'Internet Sales'[SalesAmount] - 'Internet Sales'[ProductCost]\", data_type = 'Decimal')"]},{"cell_type":"markdown","id":"f53dcca7","metadata":{},"source":["#### Add hierarchies to the semantic 
model"]},{"cell_type":"code","execution_count":null,"id":"a9309e23","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_hierarchy(table_name = 'Geography', hierarchy_name = 'Geo Hierarchy', levels = ['Continent', 'Country', 'State', 'City'])"]},{"cell_type":"code","execution_count":null,"id":"a04281ce","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Geography':\n"," tom.add_hierarchy(table_name = t.Name, hierarchy_name = 'Geo Hierarchy', levels = ['Continent', 'Country', 'State', 'City'])"]},{"cell_type":"markdown","id":"47c06a4f","metadata":{},"source":["#### Add relationship(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"e8cd7bbf","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_relationship(\n"," from_table = 'Internet Sales', from_column = 'ProductKey',\n"," to_table = 'Product', to_column = 'ProductKey', \n"," from_cardinality = 'Many', to_cardinality = 'One')"]},{"cell_type":"markdown","id":"3cc7f11e","metadata":{},"source":["#### Add a table with an M partition to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"0f5dd66a","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_table(name = table_name)\n"," tom.add_m_partition(table_name = table_name, partition_name = table_name, expression = 'let....')"]},{"cell_type":"markdown","id":"ea389123","metadata":{},"source":["#### Add a table with an entity partition to a Direct Lake semantic model "]},{"cell_type":"code","execution_count":null,"id":"f75387d1","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_table(name = table_name)\n"," tom.add_entity_partition(table_name = table_name, entity_name = table_name)"]},{"cell_type":"markdown","id":"e74d0f54","metadata":{},"source":["#### Add a calculated table (and columns) to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"934f7315","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_calculated_table(name = table_name, expression = \"DISTINCT('Product'[Color])\")\n"," tom.add_calculated_table_column(table_name = table_name, column_name = 'Color', source_column = \"'Product[Color]\", data_type = 'String')"]},{"cell_type":"markdown","id":"0e7088b7","metadata":{},"source":["#### Add role(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"ad60ebb9","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_role(role_name = 'Reader')"]},{"cell_type":"markdown","id":"c541f81a","metadata":{},"source":["#### Set row level security (RLS) to the semantic model\n","This adds row level security (or updates it if it already exists)"]},{"cell_type":"code","execution_count":null,"id":"98603a08","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_rls(role_name ='Reader', table_name = 'Product', 
filter_expression = \"'Dim Product'[Color] = \\\"Blue\\\"\")"]},{"cell_type":"code","execution_count":null,"id":"effea009","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for r in tom.model.Roles:\n"," if r.Name == 'Reader':\n"," tom.set_rls(role_name = r.Name, table_name = 'Product', filter_expression = \"'Dim Product'[Color] = \\\"Blue\\\"\")"]},{"cell_type":"markdown","id":"7fa7a03c","metadata":{},"source":["#### Set object level security (OLS) to the semantic model\n","This adds row level security (or updates it if it already exists)"]},{"cell_type":"code","execution_count":null,"id":"dd0def9d","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_ols(role_name = 'Reader', table_name = 'Product', column_name = 'Size', permission = 'None')"]},{"cell_type":"code","execution_count":null,"id":"7a389dc7","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for r in tom.model.Roles:\n"," if r.Name == 'Reader':\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.set_ols(role_name = r.Name, table_name = t.Name, column_name = 'Size', permission = 'None')"]},{"cell_type":"markdown","id":"d0f7ccd1","metadata":{},"source":["#### Add calculation groups and calculation items to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"97f4708b","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_calculation_group(name = 'MyCalcGroup')"]},{"cell_type":"code","execution_count":null,"id":"fef68832","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_calculation_item(table_name = 'MyCalcGroup', calculation_item_name = 'YTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESYTD('Calendar'[CalendarDate]))\")\n"," tom.add_calculation_item(table_name = 'MyCalcGroup', calculation_item_name = 'MTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESMTD('Calendar'[CalendarDate]))\")"]},{"cell_type":"code","execution_count":null,"id":"c7653dcc","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'MyCalcGroup':\n"," tom.add_calculation_item(table_name = t.Name, calculation_item_name = 'YTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESYTD('Calendar'[CalendarDate]))\")\n"," tom.add_calculation_item(table_name = t.Name, calculation_item_name = 'MTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESMTD('Calendar'[CalendarDate]))\")"]},{"cell_type":"markdown","id":"c6450c74","metadata":{},"source":["#### Add translations to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"2b616b90","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_translation(language = 'it-IT')"]},{"cell_type":"code","execution_count":null,"id":"dc24c200","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_translation(object = tom.model.Tables['Product'], language = 'it-IT', property = 'Name', value = 
'Produtto')"]},{"cell_type":"markdown","id":"3048cc95","metadata":{},"source":["#### Add a [Field Parameter](https://learn.microsoft.com/power-bi/create-reports/power-bi-field-parameters) to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"0a94af94","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_field_parameter(table_name = 'Parameter', objects = \"'Product'[Color], [Sales Amount], 'Geography'[Country]\")"]},{"cell_type":"markdown","id":"95aac09a","metadata":{},"source":["#### Remove an object(s) from a semantic model"]},{"cell_type":"code","execution_count":null,"id":"1e2572a8","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.remove_object(object = t.Columns['Size'])\n"," tom.remove_object(object = t.Hierarchies['Product Hierarchy'])"]},{"cell_type":"code","execution_count":null,"id":"bc453177","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.remove_object(object = tom.model.Tables['Product'].Columns['Size'])\n"," tom.remove_object(object = tom.model.Tables['Product'].Hierarchies['Product Hierarchy'])"]},{"cell_type":"markdown","id":"e0d0cb9e","metadata":{},"source":["### Custom functions to loop through non-top-level objects in a semantic model"]},{"cell_type":"code","execution_count":null,"id":"cbe3b1a3","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," print(c.Name)"]},{"cell_type":"code","execution_count":null,"id":"3f643e66","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for m in tom.all_measures():\n"," print(m.Name)"]},{"cell_type":"code","execution_count":null,"id":"ed1cde0f","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for p in tom.all_partitions():\n"," print(p.Name)"]},{"cell_type":"code","execution_count":null,"id":"f48014ae","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for h in tom.all_hierarchies():\n"," print(h.Name)"]},{"cell_type":"code","execution_count":null,"id":"9f5e7b72","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for ci in tom.all_calculation_items():\n"," print(ci.Name)"]},{"cell_type":"code","execution_count":null,"id":"3cd9ebc1","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for l in tom.all_levels():\n"," print(l.Name)"]},{"cell_type":"code","execution_count":null,"id":"12c58bad","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for rls in tom.all_rls():\n"," print(rls.Name)"]},{"cell_type":"markdown","id":"1a294bd2","metadata":{},"source":["### See Vertipaq Analyzer stats"]},{"cell_type":"code","execution_count":null,"id":"469660e9","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," 
tom.set_vertipaq_annotations()\n","\n"," for t in tom.model.Tables:\n"," rc = tom.row_count(object = t)\n"," print(t.Name + ' : ' + str(rc))\n"," for c in t.Columns:\n"," col_size = tom.total_size(column = c)\n"," print(labs.format_dax_object_name(t.Name, c.Name) + ' : ' + str(col_size))"]},{"cell_type":"markdown","id":"1ab26dfd","metadata":{},"source":["### 'UsedIn' functions"]},{"cell_type":"code","execution_count":null,"id":"412bf287","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," full_name = labs.format_dax_object_name(c.Parent.Name, c.Name)\n"," for h in tom.used_in_hierarchies(column = c):\n"," print(full_name + ' : ' + h.Name)"]},{"cell_type":"code","execution_count":null,"id":"76556900","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," full_name = labs.format_dax_object_name(c.Parent.Name, c.Name)\n"," for r in tom.used_in_relationships(object = c):\n"," rel_name = labs.create_relationship_name(r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name)\n"," print(full_name + ' : ' + rel_name)"]},{"cell_type":"code","execution_count":null,"id":"4d9ec24e","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," for r in tom.used_in_relationships(object = t):\n"," rel_name = labs.create_relationship_name(r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name)\n"," print(t.Name + ' : ' + rel_name)"]},{"cell_type":"code","execution_count":null,"id":"82251336","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," dep = labs.get_model_calc_dependencies(dataset = dataset, workspace=workspace)\n"," for o in tom.used_in_rls(object = tom.model.Tables['Product'].Columns['Color'], dependencies=dep):\n"," print(o.Name)"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Synapse PySpark","language":"Python","name":"synapse_pyspark"},"language_info":{"name":"python"},"microsoft":{"language":"python"},"nteract":{"version":"nteract-front-end@1.0.0"},"spark_compute":{"compute_id":"/trident/default"},"synapse_widget":{"state":{},"version":"0.1"},"widgets":{}},"nbformat":4,"nbformat_minor":5}
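Beyond the per-object recipes above, the same read/write pattern composes into small maintenance scripts. Below is a hedged sketch that fills in missing measure descriptions using the `tom.all_measures()` helper from the notebook; the description text is an illustrative placeholder, and the assumption that edits are committed when the `with` block exits follows the notebook's own note about `readonly=False`:

```python
from sempy_labs.tom import connect_semantic_model

dataset = ''      # semantic model name (placeholder)
workspace = None  # None resolves to the current workspace

# readonly=False opens the model in read/write mode, per the notebook's note;
# edits are applied back to the semantic model when the context manager closes.
with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:
    for m in tom.all_measures():
        if not m.Description:  # only touch measures that lack a description
            m.Description = 'TODO: document this measure.'  # placeholder text
            print(f'Added description stub for measure: {m.Name}')
```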
@@ -7,8 +7,9 @@ name="semantic-link-labs"
  authors = [
  { name = "Microsoft Corporation" },
  ]
- version="0.5.0"
- description="Semantic Link Labs project"
+ version="0.7.0"
+ description="Semantic Link Labs for Microsoft Fabric"
+ readme="README.md"
  requires-python=">=3.10,<3.12"
  classifiers = [
  "Development Status :: 3 - Alpha",
@@ -22,11 +23,18 @@ classifiers = [
  license= { text = "MIT License" }

  dependencies = [
- "semantic-link-sempy>=0.7.5",
+ "semantic-link-sempy>=0.7.7",
  "anytree",
- "powerbiclient"
+ "powerbiclient",
+ "polib",
  ]

+ [tool.setuptools.packages.find]
+ where = ["src"]
+
+ [tool.setuptools.package-data]
+ "*" = ["*.*"]
+
  [project.optional-dependencies]
  test = [
  "pytest>=8.2.1",
@@ -36,7 +44,7 @@ test = [
  Repository = "https://github.com/microsoft/semantic-link-labs.git"

  [[tool.mypy.overrides]]
- module = "sempy.*,Microsoft.*,System.*,anytree.*,powerbiclient.*,synapse.ml.services.*"
+ module = "sempy.*,Microsoft.*,System.*,anytree.*,powerbiclient.*,synapse.ml.services.*,polib.*"
  ignore_missing_imports = true

  [tool.flake8]
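The new `polib` dependency (mirrored in the mypy overrides above) corresponds to the `.po` translation catalogs this release bundles for the Best Practice Analyzer. As a rough sketch of reading one such catalog with polib; the file path is an assumption about the package layout rather than something this diff pins down:

```python
import polib

# Hypothetical path to a bundled BPA translation catalog; adjust to match
# the installed package's _bpa_translation directory.
po = polib.pofile('src/sempy_labs/_bpa_translation/_translations_it-IT.po')

for entry in po:
    # Each entry pairs the source rule text (msgid) with its translation (msgstr).
    print(f'{entry.msgid} -> {entry.msgstr}')
```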
@@ -0,0 +1,148 @@
+ Metadata-Version: 2.1
+ Name: semantic-link-labs
+ Version: 0.7.0
+ Summary: Semantic Link Labs for Microsoft Fabric
+ Author: Microsoft Corporation
+ License: MIT License
+ Project-URL: Repository, https://github.com/microsoft/semantic-link-labs.git
+ Classifier: Development Status :: 3 - Alpha
+ Classifier: Intended Audience :: Developers
+ Classifier: Intended Audience :: Education
+ Classifier: Intended Audience :: Science/Research
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3 :: Only
+ Classifier: Framework :: Jupyter
+ Requires-Python: <3.12,>=3.10
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: semantic-link-sempy>=0.7.7
+ Requires-Dist: anytree
+ Requires-Dist: powerbiclient
+ Requires-Dist: polib
+ Provides-Extra: test
+ Requires-Dist: pytest>=8.2.1; extra == "test"
+
+ # Semantic Link Labs
+
+ [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.7.0&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+ [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
+ [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)
+
+ ---
+ [Read the documentation on ReadTheDocs!](https://semantic-link-labs.readthedocs.io/en/stable/)
+ ---
+
+ This is a Python library intended to be used in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library was originally intended solely to contain functions used for [migrating semantic models to Direct Lake mode](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration). However, it quickly became apparent that functions within such a library could support many other useful activities in the realm of semantic models, reports, lakehouses and really anything Fabric-related. As such, this library contains a variety of functions, ranging from running [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_vertipaq_analyzer) or the [Best Practice Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa) against a semantic model, to checking whether any [lakehouse tables hit Direct Lake guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables), to accessing the [Tabular Object Model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html), and more!
+
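For a feel of the surface area, the two model-quality checks linked above reduce to one-liners. The function names come from the documentation links in the preceding paragraph, while the `extended` flag on `get_lakehouse_tables` is an assumption about its signature:

```python
import sempy_labs as labs
from sempy_labs import lakehouse as lake

# Run the Best Practice Analyzer against a semantic model
# ('' and None are placeholders for the model and workspace names).
labs.run_model_bpa(dataset='', workspace=None)

# List lakehouse tables; extended=True is assumed to add the
# Direct Lake guardrail details mentioned above.
lake.get_lakehouse_tables(extended=True)
```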
+ Instructions for migrating import/DirectQuery semantic models to Direct Lake mode can be found [here](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration).
+
+ If you encounter any issues, please [raise a bug](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=bug_report.md&title=).
+
+ If you have ideas for new features/functions, please [request a feature](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=feature_request.md&title=).
+
+ ## Install the library in a Fabric notebook
+ ```python
+ %pip install semantic-link-labs
+ ```
+
+ ## Once installed, run this code to import the library into your notebook
+ ```python
+ import sempy_labs as labs
+ from sempy_labs import migration, directlake
+ from sempy_labs import lakehouse as lake
+ from sempy_labs import report as rep
+ from sempy_labs.tom import connect_semantic_model
+ ```
+
+ ## Load semantic-link-labs into a custom [Fabric environment](https://learn.microsoft.com/fabric/data-engineering/create-and-use-environment)
+ An even better way to ensure the semantic-link-labs library is available in your workspace/notebooks is to load it as a library in a custom Fabric environment. If you do this, you will not have to run the above '%pip install' code every time in your notebook. Please follow the steps below.
+
+ #### Create a custom environment
+ 1. Navigate to your Fabric workspace
+ 2. Click 'New' -> More options
+ 3. Within 'Data Science', click 'Environment'
+ 4. Name your environment, click 'Create'
+
+ #### Add semantic-link-labs as a library to the environment
+ 1. Within 'Public libraries', click 'Add from PyPI'
+ 2. Enter 'semantic-link-labs'
+ 3. Click 'Save' at the top right of the screen
+ 4. Click 'Publish' at the top right of the screen
+ 5. Click 'Publish All'
+
+ #### Update your notebook to use the new environment (*must wait for the environment to finish publishing*)
+ 1. Navigate to your notebook
+ 2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
+
+ ---
+ ## Direct Lake migration
+
+ The following process automates the migration of an import/DirectQuery model to a new [Direct Lake](https://learn.microsoft.com/power-bi/enterprise/directlake-overview) model. The first step is specifically applicable to models which use Power Query to perform data transformations. If your model does not use Power Query, you must migrate the base tables used in your semantic model to a Fabric lakehouse.
+
+ Check out [Nikola Ilic](https://twitter.com/DataMozart)'s terrific [blog post](https://data-mozart.com/migrate-existing-power-bi-semantic-models-to-direct-lake-a-step-by-step-guide/) on this topic!
+
+ Check out my [blog post](https://www.elegantbi.com/post/direct-lake-migration) on this topic!
+
+ [![Direct Lake Migration Video](https://img.youtube.com/vi/gGIxMrTVyyI/0.jpg)](https://www.youtube.com/watch?v=gGIxMrTVyyI?t=495)
+
+ ### Prerequisites
+
+ * Make sure you [enable XMLA Read/Write](https://learn.microsoft.com/power-bi/enterprise/service-premium-connect-tools#enable-xmla-read-write) for your capacity
+ * Make sure you have a [lakehouse](https://learn.microsoft.com/fabric/onelake/create-lakehouse-onelake#create-a-lakehouse) in a Fabric workspace
+ * Enable the following [setting](https://learn.microsoft.com/power-bi/transform-model/service-edit-data-models#enable-the-preview-feature): Workspace -> Workspace Settings -> General -> Data model settings -> Users can edit data models in the Power BI service
+
+ ### Instructions
+
+ 1. Download this [notebook](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Migration%20to%20Direct%20Lake.ipynb).
+ 2. Make sure you are in the ['Data Engineering' persona](https://learn.microsoft.com/fabric/get-started/microsoft-fabric-overview#components-of-microsoft-fabric). Click the icon at the bottom left corner of your workspace screen and select 'Data Engineering'.
+ 3. In your workspace, select 'New -> Import notebook' and import the notebook from step 1.
+ 4. [Add your lakehouse](https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse) to your Fabric notebook.
+ 5. Follow the instructions within the notebook.
+
+ ### The migration process
+
+ > [!NOTE]
+ > The first 4 steps are only necessary if you have logic in Power Query. Otherwise, you will need to migrate your semantic model source tables to lakehouse tables.
+
+ 1. The first step of the notebook creates a Power Query Template (.pqt) file, which eases the migration of Power Query logic to Dataflows Gen2.
+ 2. After the .pqt file is created, sync files from your [OneLake file explorer](https://www.microsoft.com/download/details.aspx?id=105222).
+ 3. Navigate to your lakehouse (this is critical!). From your lakehouse, create a new Dataflows Gen2 and import the Power Query Template file. Doing this step from your lakehouse will automatically set the destination for all tables to this lakehouse (instead of having to manually map each one).
+ 4. Publish the Dataflow Gen2 and wait for it to finish creating the delta lake tables in your lakehouse.
+ 5. Back in the notebook, the next step will create your new Direct Lake semantic model with the name of your choice, taking all the relevant properties from the original semantic model and refreshing/framing your new semantic model.
+
+ > [!NOTE]
+ > As of version 0.2.1, calculated tables are also migrated to Direct Lake (as data tables with their DAX expression stored as model annotations in the new semantic model). Additionally, Field Parameters are migrated as they were in the original semantic model (as a calculated table).
+
+ 6. Finally, you can easily rebind all reports which use the import/DQ semantic model to the new Direct Lake semantic model in one click (a sketch of this step follows below).
+
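As a hedged sketch of step 6, assuming a `report_rebind` helper in the library's `report` module (the report and model names below are placeholders, and the exact parameter names may differ):

```python
from sempy_labs import report as rep

# Rebind a report from the old import/DirectQuery model to the new
# Direct Lake model; both names are placeholders.
rep.report_rebind(
    report='Sales Report',
    dataset='Sales Direct Lake Model',
)
```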
+ ### Completing these steps will do the following:
+ * Offload your Power Query logic to Dataflows Gen2 inside of Fabric (where it can be maintained and development can continue).
+ * Dataflows Gen2 will create delta tables in your Fabric lakehouse. These tables can then be used for your Direct Lake model.
+ * Create a new semantic model in Direct Lake mode containing all the standard tables and columns, calculation groups, measures, relationships, hierarchies, roles, row level security, perspectives, and translations from your original semantic model.
+ * Viable calculated tables are migrated to the new semantic model as data tables. Delta tables are dynamically generated in the lakehouse to support the Direct Lake model. The calculated table DAX logic is stored as model annotations in the new semantic model.
+ * Field parameters are migrated to the new semantic model as they were in the original semantic model (as calculated tables). Any calculated columns used in field parameters are automatically removed in the new semantic model's field parameter(s).
+ * Unsupported objects are not transferred (e.g., calculated columns, relationships using columns with unsupported data types, etc.).
+ * Reports used by your original semantic model will be rebound to your new semantic model.
+
+ ## Contributing
+
+ This project welcomes contributions and suggestions. Most contributions require you to agree to a
+ Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
+ the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
+
+ When you submit a pull request, a CLA bot will automatically determine whether you need to provide
+ a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
+ provided by the bot. You will only need to do this once across all repos using our CLA.
+
+ This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
+ For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
+ contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
+
+ ## Trademarks
+
+ This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft
+ trademarks or logos is subject to and must follow
+ [Microsoft's Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/usage/general).
+ Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship.
+ Any use of third-party trademarks or logos is subject to those third parties' policies.