semantic-link-labs 0.7.0.tar.gz → 0.7.2.tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.

This version of semantic-link-labs might be problematic.

Files changed (143)
  1. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/PKG-INFO +2 -2
  2. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/README.md +1 -1
  3. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/docs/requirements.txt +2 -1
  4. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/docs/source/conf.py +1 -1
  5. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/pyproject.toml +1 -1
  6. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/semantic_link_labs.egg-info/PKG-INFO +2 -2
  7. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_helper_functions.py +5 -2
  8. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_list_functions.py +99 -61
  9. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_model_bpa_bulk.py +10 -6
  10. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_model_bpa_rules.py +15 -4
  11. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_query_scale_out.py +29 -31
  12. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/lakehouse/__init__.py +2 -0
  13. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/lakehouse/_lakehouse.py +63 -7
  14. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
  15. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
  16. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/.github/ISSUE_TEMPLATE/issue--question---advice-needed.md +0 -0
  17. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/.github/workflows/build.yaml +0 -0
  18. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/.github/workflows/codeql.yaml +0 -0
  19. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/.gitignore +0 -0
  20. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/.readthedocs.yaml +0 -0
  21. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/.vscode/settings.json +0 -0
  22. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/CODE_OF_CONDUCT.md +0 -0
  23. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/LICENSE +0 -0
  24. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/SECURITY.md +0 -0
  25. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/SUPPORT.md +0 -0
  26. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/docs/Makefile +0 -0
  27. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/docs/make.bat +0 -0
  28. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/docs/source/index.rst +0 -0
  29. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/docs/source/modules.rst +0 -0
  30. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/environment.yml +0 -0
  31. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/notebooks/Best Practice Analyzer Report.ipynb +0 -0
  32. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/notebooks/Migration to Direct Lake.ipynb +0 -0
  33. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/notebooks/Model Optimization.ipynb +0 -0
  34. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/notebooks/Query Scale Out.ipynb +0 -0
  35. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/notebooks/Semantic Model Refresh.ipynb +0 -0
  36. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/notebooks/Tabular Object Model.ipynb +0 -0
  37. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/setup.cfg +0 -0
  38. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/semantic_link_labs.egg-info/SOURCES.txt +0 -0
  39. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/semantic_link_labs.egg-info/dependency_links.txt +0 -0
  40. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/semantic_link_labs.egg-info/requires.txt +0 -0
  41. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/semantic_link_labs.egg-info/top_level.txt +0 -0
  42. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/__init__.py +0 -0
  43. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_ai.py +0 -0
  44. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_am-ET.po +0 -0
  45. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_ar-AE.po +0 -0
  46. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_cs-CZ.po +0 -0
  47. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_da-DK.po +0 -0
  48. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_de-DE.po +0 -0
  49. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_el-GR.po +0 -0
  50. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_es-ES.po +0 -0
  51. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_fa-IR.po +0 -0
  52. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_fr-FR.po +0 -0
  53. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_ga-IE.po +0 -0
  54. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_he-IL.po +0 -0
  55. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_hi-IN.po +0 -0
  56. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_hu-HU.po +0 -0
  57. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_is-IS.po +0 -0
  58. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_it-IT.po +0 -0
  59. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_ja-JP.po +0 -0
  60. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_nl-NL.po +0 -0
  61. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_pl-PL.po +0 -0
  62. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_pt-BR.po +0 -0
  63. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_pt-PT.po +0 -0
  64. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_ru-RU.po +0 -0
  65. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_ta-IN.po +0 -0
  66. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_te-IN.po +0 -0
  67. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_th-TH.po +0 -0
  68. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_zh-CN.po +0 -0
  69. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_bpa_translation/_translations_zu-ZA.po +0 -0
  70. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_clear_cache.py +0 -0
  71. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_connections.py +0 -0
  72. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_dax.py +0 -0
  73. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_generate_semantic_model.py +0 -0
  74. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_icons.py +0 -0
  75. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_model_auto_build.py +0 -0
  76. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_model_bpa.py +0 -0
  77. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_model_dependencies.py +0 -0
  78. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_one_lake_integration.py +0 -0
  79. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_refresh_semantic_model.py +0 -0
  80. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_translations.py +0 -0
  81. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/_vertipaq.py +0 -0
  82. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/directlake/__init__.py +0 -0
  83. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/directlake/_directlake_schema_compare.py +0 -0
  84. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/directlake/_directlake_schema_sync.py +0 -0
  85. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/directlake/_dl_helper.py +0 -0
  86. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/directlake/_get_directlake_lakehouse.py +0 -0
  87. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/directlake/_get_shared_expression.py +0 -0
  88. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/directlake/_guardrails.py +0 -0
  89. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/directlake/_list_directlake_model_calc_tables.py +0 -0
  90. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py +0 -0
  91. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +0 -0
  92. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/directlake/_update_directlake_partition_entity.py +0 -0
  93. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/directlake/_warm_cache.py +0 -0
  94. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/lakehouse/_get_lakehouse_columns.py +0 -0
  95. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/lakehouse/_get_lakehouse_tables.py +0 -0
  96. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/lakehouse/_shortcuts.py +0 -0
  97. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/migration/__init__.py +0 -0
  98. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/migration/_create_pqt_file.py +0 -0
  99. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/migration/_migrate_calctables_to_lakehouse.py +0 -0
  100. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/migration/_migrate_calctables_to_semantic_model.py +0 -0
  101. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +0 -0
  102. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +0 -0
  103. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/migration/_migration_validation.py +0 -0
  104. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/migration/_refresh_calc_tables.py +0 -0
  105. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_BPAReportTemplate.json +0 -0
  106. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/__init__.py +0 -0
  107. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +0 -0
  108. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/.platform +0 -0
  109. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +0 -0
  110. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +0 -0
  111. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +0 -0
  112. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +0 -0
  113. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +0 -0
  114. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +0 -0
  115. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +0 -0
  116. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +0 -0
  117. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +0 -0
  118. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +0 -0
  119. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +0 -0
  120. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +0 -0
  121. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +0 -0
  122. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +0 -0
  123. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +0 -0
  124. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +0 -0
  125. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +0 -0
  126. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +0 -0
  127. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +0 -0
  128. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +0 -0
  129. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +0 -0
  130. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +0 -0
  131. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +0 -0
  132. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +0 -0
  133. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/report.json +0 -0
  134. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition/version.json +0 -0
  135. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_bpareporttemplate/definition.pbir +0 -0
  136. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_generate_report.py +0 -0
  137. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_report_functions.py +0 -0
  138. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/report/_report_rebind.py +0 -0
  139. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/tom/__init__.py +0 -0
  140. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/src/sempy_labs/tom/_model.py +0 -0
  141. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/tests/__init__.py +0 -0
  142. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/tests/test_shortcuts.py +0 -0
  143. {semantic_link_labs-0.7.0 → semantic_link_labs-0.7.2}/tests/test_tom.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: semantic-link-labs
- Version: 0.7.0
+ Version: 0.7.2
  Summary: Semantic Link Labs for Microsoft Fabric
  Author: Microsoft Corporation
  License: MIT License
@@ -25,7 +25,7 @@ Requires-Dist: pytest>=8.2.1; extra == "test"
  # Semantic Link Labs

  [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
- [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.7.0&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.7.2&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
  [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)

@@ -1,7 +1,7 @@
  # Semantic Link Labs

  [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
- [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.7.0&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.7.2&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
  [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)

@@ -8,4 +8,5 @@ azure-keyvault-secrets
  azure-storage-file-datalake==12.3.1
  azure-storage-blob>=12.9.0
  anytree
- IPython
+ IPython
+ polib
@@ -13,7 +13,7 @@ sys.path.insert(0, os.path.abspath('../../src/'))
  project = 'semantic-link-labs'
  copyright = '2024, Microsoft and community'
  author = 'Microsoft and community'
- release = '0.7.0'
+ release = '0.7.2'

  # -- General configuration ---------------------------------------------------
  # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
@@ -7,7 +7,7 @@ name="semantic-link-labs"
  authors = [
  { name = "Microsoft Corporation" },
  ]
- version="0.7.0"
+ version="0.7.2"
  description="Semantic Link Labs for Microsoft Fabric"
  readme="README.md"
  requires-python=">=3.10,<3.12"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: semantic-link-labs
- Version: 0.7.0
+ Version: 0.7.2
  Summary: Semantic Link Labs for Microsoft Fabric
  Author: Microsoft Corporation
  License: MIT License
@@ -25,7 +25,7 @@ Requires-Dist: pytest>=8.2.1; extra == "test"
  # Semantic Link Labs

  [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
- [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.7.0&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.7.2&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
  [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)

@@ -11,6 +11,7 @@ from typing import Optional, Tuple, List
  from uuid import UUID
  import sempy_labs._icons as icons
  from sempy.fabric.exceptions import FabricHTTPException
+ import urllib.parse


  def create_abfss_path(
@@ -681,7 +682,8 @@ def resolve_workspace_capacity(workspace: Optional[str] = None) -> Tuple[UUID, s
  """

  workspace = fabric.resolve_workspace_name(workspace)
- dfW = fabric.list_workspaces(filter=f"name eq '{workspace}'")
+ filter_condition = urllib.parse.quote(workspace)
+ dfW = fabric.list_workspaces(filter=f"name eq '{filter_condition}'")
  capacity_id = dfW["Capacity Id"].iloc[0]
  dfC = fabric.list_capacities()
  dfC_filt = dfC[dfC["Id"] == capacity_id]
@@ -711,7 +713,8 @@ def get_capacity_id(workspace: Optional[str] = None) -> UUID:
  """

  workspace = fabric.resolve_workspace_name(workspace)
- dfW = fabric.list_workspaces(filter=f"name eq '{workspace}'")
+ filter_condition = urllib.parse.quote(workspace)
+ dfW = fabric.list_workspaces(filter=f"name eq '{filter_condition}'")
  if len(dfW) == 0:
  raise ValueError(f"{icons.red_dot} The '{workspace}' does not exist'.")

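Both resolve_workspace_capacity and get_capacity_id now URL-encode the workspace name before embedding it in the OData filter, so names containing spaces or reserved characters no longer produce a malformed list_workspaces query. A minimal sketch of the encoding step in isolation (the workspace name below is illustrative):

    import urllib.parse

    # Reserved characters such as spaces and '&' are percent-encoded,
    # keeping the OData filter expression well-formed.
    workspace = "Sales & Marketing"
    filter_condition = urllib.parse.quote(workspace)
    print(f"name eq '{filter_condition}'")  # name eq 'Sales%20%26%20Marketing'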
@@ -8,12 +8,11 @@ from sempy_labs._helper_functions import (
  _decode_b64,
  pagination,
  lro,
+ resolve_item_type,
  )
  import pandas as pd
  import base64
  import requests
- import time
- import json
  from pyspark.sql import SparkSession
  from typing import Optional
  import sempy_labs._icons as icons
@@ -1529,7 +1528,7 @@ def list_shortcuts(
  lakehouse: Optional[str] = None, workspace: Optional[str] = None
  ) -> pd.DataFrame:
  """
- Shows all shortcuts which exist in a Fabric lakehouse.
+ Shows all shortcuts which exist in a Fabric lakehouse and their properties.

  Parameters
  ----------
@@ -1551,71 +1550,84 @@

  if lakehouse is None:
  lakehouse_id = fabric.get_lakehouse_id()
- lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
  else:
  lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)

+ client = fabric.FabricRestClient()
+
  df = pd.DataFrame(
  columns=[
  "Shortcut Name",
  "Shortcut Path",
- "Source",
- "Source Lakehouse Name",
+ "Source Type",
+ "Source Workspace Id",
  "Source Workspace Name",
- "Source Path",
- "Source Connection ID",
- "Source Location",
- "Source SubPath",
+ "Source Item Id",
+ "Source Item Name",
+ "Source Item Type",
+ "OneLake Path",
+ "Connection Id",
+ "Location",
+ "Bucket",
+ "SubPath",
  ]
  )

- client = fabric.FabricRestClient()
  response = client.get(
  f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts"
  )
+
  if response.status_code != 200:
  raise FabricHTTPException(response)

  responses = pagination(client, response)

  for r in responses:
- for s in r.get("value", []):
- shortcutName = s.get("name")
- shortcutPath = s.get("path")
- source = list(s["target"].keys())[0]
- (
- sourceLakehouseName,
- sourceWorkspaceName,
- sourcePath,
- connectionId,
- location,
- subpath,
- ) = (None, None, None, None, None, None)
- if source == "oneLake":
- sourceLakehouseId = s.get("target", {}).get(source, {}).get("itemId")
- sourcePath = s.get("target", {}).get(source, {}).get("path")
- sourceWorkspaceId = (
- s.get("target", {}).get(source, {}).get("workspaceId")
- )
- sourceWorkspaceName = fabric.resolve_workspace_name(sourceWorkspaceId)
- sourceLakehouseName = resolve_lakehouse_name(
- sourceLakehouseId, sourceWorkspaceName
- )
- else:
- connectionId = s.get("target", {}).get(source, {}).get("connectionId")
- location = s.get("target", {}).get(source, {}).get("location")
- subpath = s.get("target", {}).get(source, {}).get("subpath")
+ for i in r.get("value", []):
+ tgt = i.get("target", {})
+ s3_compat = tgt.get("s3Compatible", {})
+ gcs = tgt.get("googleCloudStorage", {})
+ eds = tgt.get("externalDataShare", {})
+ connection_id = (
+ s3_compat.get("connectionId")
+ or gcs.get("connectionId")
+ or eds.get("connectionId")
+ or None
+ )
+ location = s3_compat.get("location") or gcs.get("location") or None
+ sub_path = s3_compat.get("subpath") or gcs.get("subpath") or None
+ source_workspace_id = tgt.get("oneLake", {}).get("workspaceId")
+ source_item_id = tgt.get("oneLake", {}).get("itemId")
+ source_workspace_name = (
+ fabric.resolve_workspace_name(source_workspace_id)
+ if source_workspace_id is not None
+ else None
+ )

  new_data = {
- "Shortcut Name": shortcutName,
- "Shortcut Path": shortcutPath,
- "Source": source,
- "Source Lakehouse Name": sourceLakehouseName,
- "Source Workspace Name": sourceWorkspaceName,
- "Source Path": sourcePath,
- "Source Connection ID": connectionId,
- "Source Location": location,
- "Source SubPath": subpath,
+ "Shortcut Name": i.get("name"),
+ "Shortcut Path": i.get("path"),
+ "Source Type": tgt.get("type"),
+ "Source Workspace Id": source_workspace_id,
+ "Source Workspace Name": source_workspace_name,
+ "Source Item Id": source_item_id,
+ "Source Item Name": (
+ fabric.resolve_item_name(
+ source_item_id, workspace=source_workspace_name
+ )
+ if source_item_id is not None
+ else None
+ ),
+ "Source Item Type": (
+ resolve_item_type(source_item_id, workspace=source_workspace_name)
+ if source_item_id is not None
+ else None
+ ),
+ "OneLake Path": tgt.get("oneLake", {}).get("path"),
+ "Connection Id": connection_id,
+ "Location": location,
+ "Bucket": s3_compat.get("bucket"),
+ "SubPath": sub_path,
  }
  df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

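list_shortcuts now reads the shortcut target from its typed keys (oneLake, s3Compatible, googleCloudStorage, externalDataShare) instead of assuming the first key of the target dict is the source, and the output gains Source Type, Bucket, and OneLake Path columns. A hedged usage sketch, assuming a Fabric notebook session and the top-level package export (names are placeholders):

    import sempy_labs as labs

    # One row per shortcut; the 0.7.2 schema includes "Source Type",
    # "Bucket" and "OneLake Path" alongside the connection details.
    df = labs.list_shortcuts(lakehouse="MyLakehouse", workspace="MyWorkspace")
    print(df[["Shortcut Name", "Shortcut Path", "Source Type", "Connection Id"]])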
@@ -1722,9 +1734,9 @@
  min_node_count : int
  The `minimum node count <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
  max_node_count : int
- The `maximum node count <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
+ The `maximum node count <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
  min_executors : int
- The `minimum executors <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
+ The `minimum executors <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
  max_executors : int
  The `maximum executors <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
  node_family : str, default='MemoryOptimized'
@@ -1799,10 +1811,10 @@
  The `minimum node count <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
  Defaults to None which keeps the existing property setting.
  max_node_count : int, default=None
- The `maximum node count <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
+ The `maximum node count <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
  Defaults to None which keeps the existing property setting.
  min_executors : int, default=None
- The `minimum executors <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
+ The `minimum executors <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
  Defaults to None which keeps the existing property setting.
  max_executors : int, default=None
  The `maximum executors <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
@@ -2092,7 +2104,7 @@
  `Default pool <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#poolproperties>`_ for workspace.
  Defaults to None which keeps the existing property setting.
  max_node_count : int, default=None
- The `maximum node count <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#starterpoolproperties>`_.
+ The `maximum node count <https://learn.microsoft.com/en-us/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#starterpoolproperties>`_.
  Defaults to None which keeps the existing property setting.
  max_executors : int, default=None
  The `maximum executors <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#starterpoolproperties>`_.
@@ -2161,7 +2173,10 @@


  def add_user_to_workspace(
- email_address: str, role_name: str, workspace: Optional[str] = None
+ email_address: str,
+ role_name: str,
+ principal_type: Optional[str] = "User",
+ workspace: Optional[str] = None,
  ):
  """
  Adds a user to a workspace.
@@ -2172,13 +2187,12 @@
  The email address of the user.
  role_name : str
  The `role <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#groupuseraccessright>`_ of the user within the workspace.
+ principal_type : str, default='User'
+ The `principal type <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#principaltype>`_.
  workspace : str, default=None
  The name of the workspace.
  Defaults to None which resolves to the workspace of the attached lakehouse
  or if no lakehouse attached, resolves to the workspace of the notebook.
-
- Returns
- -------
  """

  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -2190,10 +2204,21 @@
  f"{icons.red_dot} Invalid role. The 'role_name' parameter must be one of the following: {role_names}."
  )
  plural = "n" if role_name == "Admin" else ""
+ principal_types = ["App", "Group", "None", "User"]
+ principal_type = principal_type.capitalize()
+ if principal_type not in principal_types:
+ raise ValueError(
+ f"{icons.red_dot} Invalid princpal type. Valid options: {principal_types}."
+ )

  client = fabric.PowerBIRestClient()

- request_body = {"emailAddress": email_address, "groupUserAccessRight": role_name}
+ request_body = {
+ "emailAddress": email_address,
+ "groupUserAccessRight": role_name,
+ "principalType": principal_type,
+ "identifier": email_address,
+ }

  response = client.post(
  f"/v1.0/myorg/groups/{workspace_id}/users", json=request_body
@@ -2236,7 +2261,10 @@ def delete_user_from_workspace(email_address: str, workspace: Optional[str] = No


  def update_workspace_user(
- email_address: str, role_name: str, workspace: Optional[str] = None
+ email_address: str,
+ role_name: str,
+ principal_type: Optional[str] = "User",
+ workspace: Optional[str] = None,
  ):
  """
  Updates a user's role within a workspace.
@@ -2247,13 +2275,12 @@
  The email address of the user.
  role_name : str
  The `role <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#groupuseraccessright>`_ of the user within the workspace.
+ principal_type : str, default='User'
+ The `principal type <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#principaltype>`_.
  workspace : str, default=None
  The name of the workspace.
  Defaults to None which resolves to the workspace of the attached lakehouse
  or if no lakehouse attached, resolves to the workspace of the notebook.
-
- Returns
- -------
  """

  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -2264,8 +2291,19 @@
  raise ValueError(
  f"{icons.red_dot} Invalid role. The 'role_name' parameter must be one of the following: {role_names}."
  )
+ principal_types = ["App", "Group", "None", "User"]
+ principal_type = principal_type.capitalize()
+ if principal_type not in principal_types:
+ raise ValueError(
+ f"{icons.red_dot} Invalid princpal type. Valid options: {principal_types}."
+ )

- request_body = {"emailAddress": email_address, "groupUserAccessRight": role_name}
+ request_body = {
+ "emailAddress": email_address,
+ "groupUserAccessRight": role_name,
+ "principalType": principal_type,
+ "identifier": email_address,
+ }

  client = fabric.PowerBIRestClient()
  response = client.put(f"/v1.0/myorg/groups/{workspace_id}/users", json=request_body)
@@ -21,6 +21,7 @@ def run_model_bpa_bulk(
  extended: Optional[bool] = False,
  language: Optional[str] = None,
  workspace: Optional[str | List[str]] = None,
+ skip_models: Optional[str | List[str]] = ["ModelBPA", "Fabric Capacity Metrics"],
  ):
  """
  Runs the semantic model Best Practice Analyzer across all semantic models in a workspace (or all accessible workspaces).
@@ -41,18 +42,22 @@
  workspace : str | List[str], default=None
  The workspace or list of workspaces to scan.
  Defaults to None which scans all accessible workspaces.
-
- Returns
- -------
+ skip_models : str | List[str], default=['ModelBPA', 'Fabric Capacity Metrics']
+ The semantic models to always skip when running this analysis.
  """

  import pyspark.sql.functions as F

  if not lakehouse_attached():
  raise ValueError(
- "No lakehouse is attached to this notebook. Must attach a lakehouse to the notebook."
+ f"{icons.red_dot} No lakehouse is attached to this notebook. Must attach a lakehouse to the notebook."
  )

+ if isinstance(skip_models, str):
+ skip_models = [skip_models]
+
+ skip_models.extend(["ModelBPA", "Fabric Capacity Metrics"])
+
  cols = [
  "Capacity Name",
  "Capacity Id",
@@ -113,8 +118,7 @@
  or set(["Lakehouse", "SemanticModel"]).issubset(set(x["Type"]))
  )
  default_semantic_models = filtered_df["Display Name"].unique().tolist()
- # Skip ModelBPA :)
- skip_models = default_semantic_models + [icons.model_bpa_name]
+ skip_models.extend(default_semantic_models)
  dfD_filt = dfD[~dfD["Dataset Name"].isin(skip_models)]

  if len(dfD_filt) > 0:
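skip_models is now a public parameter: a bare string is wrapped into a list, the built-in exclusions (ModelBPA, Fabric Capacity Metrics) are always appended, and default semantic models are still filtered out afterwards via skip_models.extend(default_semantic_models). A hedged usage sketch, assuming an attached lakehouse and the top-level export (workspace and model names are placeholders):

    import sempy_labs as labs

    # Scan two workspaces, additionally skipping an in-house template model;
    # "ModelBPA" and "Fabric Capacity Metrics" are skipped regardless.
    labs.run_model_bpa_bulk(
        workspace=["Sales", "Finance"],
        skip_models="Template Model",
    )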
@@ -135,6 +135,17 @@ def model_bpa_rules(
  "Setting the 'Data Coverage Definition' property may lead to better performance because the engine knows when it can only query the import-portion of the table and when it needs to query the DirectQuery portion of the table.",
  "https://learn.microsoft.com/analysis-services/tom/table-partitions?view=asallproducts-allversions",
  ),
+ (
+ "Performance",
+ "Model",
+ "Warning",
+ "Dual mode is only relevant for dimension tables if DirectQuery is used for the corresponding fact table",
+ lambda obj: not any(
+ p.Mode == TOM.ModeType.DirectQuery for p in tom.all_partitions()
+ )
+ and any(p.Mode == TOM.ModeType.Dual for p in tom.all_partitions()),
+ "Only use Dual mode for dimension tables/partitions where a corresponding fact table is in DirectQuery. Using Dual mode in other circumstances (i.e. rest of the model is in Import mode) may lead to performance issues especially if the number of measures in the model is high.",
+ ),
  (
  "Performance",
  "Table",
@@ -590,13 +601,13 @@ def model_bpa_rules(
  re.search(
  r"USERELATIONSHIP\s*\(\s*\'*"
  + obj.FromTable.Name
- + "'*\["
+ + r"'*\["
  + obj.FromColumn.Name
- + "\]\s*,\s*'*"
+ + r"\]\s*,\s*'*"
  + obj.ToTable.Name
- + "'*\["
+ + r"'*\["
  + obj.ToColumn.Name
- + "\]",
+ + r"\]",
  m.Expression,
  flags=re.IGNORECASE,
  )
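The regex fragments are now raw strings. The regex itself is unchanged, but "\[" and "\]" in ordinary string literals are invalid escape sequences that Python flags with a DeprecationWarning (a SyntaxWarning on 3.12+); r"\[" makes the backslash explicit to the regex engine only. A standalone illustration of the same pattern style (table and column names are made up):

    import re

    # Raw strings keep the regex metacharacter escapes out of the
    # string-literal escape rules.
    pattern = r"USERELATIONSHIP\s*\(\s*'*Sales'*\[OrderDate\]"
    text = "CALCULATE([Amt], USERELATIONSHIP('Sales'[OrderDate], 'Date'[Date]))"
    print(bool(re.search(pattern, text, flags=re.IGNORECASE)))  # True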
@@ -21,10 +21,6 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
  The Fabric workspace name.
  Defaults to None which resolves to the workspace of the attached lakehouse
  or if no lakehouse attached, resolves to the workspace of the notebook.
-
- Returns
- -------
-
  """

  # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/trigger-query-scale-out-sync-in-group
@@ -63,7 +59,6 @@
  -------
  Tuple[pandas.DataFrame, pandas.DataFrame]
  2 pandas dataframes showing the query scale-out sync status.
-
  """

  # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/get-query-scale-out-sync-status-in-group
@@ -161,7 +156,6 @@ def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
  -------
  pandas.DataFrame
  A pandas dataframe showing the current query scale out settings.
-
  """

  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -177,6 +171,7 @@ def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
  raise FabricHTTPException(response)

  df = list_qso_settings(dataset=dataset, workspace=workspace)
+
  print(
  f"{icons.green_dot} Query scale out has been disabled for the '{dataset}' semantic model within the '{workspace}' workspace."
  )
@@ -210,14 +205,20 @@
  -------
  pandas.DataFrame
  A pandas dataframe showing the current query scale-out settings.
-
  """

+ from sempy_labs._helper_functions import is_default_semantic_model
+
  # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/update-dataset-in-group

  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
  dataset_id = resolve_dataset_id(dataset, workspace)

+ if is_default_semantic_model(dataset=dataset, workspace=workspace):
+ raise ValueError(
+ f"{icons.red_dot} The 'set_qso' function does not run against default semantic models."
+ )
+
  if max_read_only_replicas == 0:
  disable_qso(dataset=dataset, workspace=workspace)
  return
@@ -225,32 +226,33 @@
  request_body = {
  "queryScaleOutSettings": {
  "autoSyncReadOnlyReplicas": auto_sync,
- "maxReadOnlyReplicas": str(max_read_only_replicas),
+ "maxReadOnlyReplicas": max_read_only_replicas,
  }
  }

- ssm = set_semantic_model_storage_format(
- dataset=dataset, storage_format="Large", workspace=workspace
- )
- if ssm == 200:
- client = fabric.PowerBIRestClient()
- response = client.patch(
- f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}",
- json=request_body,
- )
- if response.status_code != 200:
- raise FabricHTTPException(response)
+ dfL = list_qso_settings(dataset=dataset, workspace=workspace)
+ storage_mode = dfL["Storage Mode"].iloc[0]

- df = list_qso_settings(dataset=dataset, workspace=workspace)
- print(
- f"{icons.green_dot} Query scale out has been set on the '{dataset}' semantic model within the '{workspace}' workspace."
- )
- return df
- else:
- raise ValueError(
- f"{icons.red_dot} Failed to set the '{dataset}' semantic model within the '{workspace}' workspace to large semantic model storage format. This is a prerequisite for enabling Query Scale Out.\n\"https://learn.microsoft.com/power-bi/enterprise/service-premium-scale-out#prerequisites\""
+ if storage_mode == "Small":
+ set_semantic_model_storage_format(
+ dataset=dataset, storage_format="Large", workspace=workspace
  )

+ client = fabric.PowerBIRestClient()
+ response = client.patch(
+ f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}",
+ json=request_body,
+ )
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+
+ df = list_qso_settings(dataset=dataset, workspace=workspace)
+ print(
+ f"{icons.green_dot} Query scale out has been set on the '{dataset}' semantic model within the '{workspace}' workspace."
+ )
+
+ return df
+

  def set_semantic_model_storage_format(
  dataset: str, storage_format: str, workspace: Optional[str] = None
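set_qso now refuses default semantic models, sends maxReadOnlyReplicas as an integer rather than a string, and only upgrades the storage format when the model is still in Small mode instead of calling set_semantic_model_storage_format unconditionally. A hedged usage sketch, assuming a Fabric notebook session (names are placeholders; parameter names are taken from the diff above):

    import sempy_labs as labs

    # Enable query scale-out with up to 2 read-only replicas;
    # max_read_only_replicas=0 would route through disable_qso instead.
    df = labs.set_qso(
        dataset="MySemanticModel",
        auto_sync=True,
        max_read_only_replicas=2,
        workspace="MyWorkspace",
    )
    print(df)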
@@ -268,10 +270,6 @@
  The Fabric workspace name.
  Defaults to None which resolves to the workspace of the attached lakehouse
  or if no lakehouse attached, resolves to the workspace of the notebook.
-
- Returns
- -------
-
  """

  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -3,6 +3,7 @@ from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
  from sempy_labs.lakehouse._lakehouse import (
  lakehouse_attached,
  optimize_lakehouse_tables,
+ vacuum_lakehouse_tables,
  )

  from sempy_labs.lakehouse._shortcuts import (
@@ -19,4 +20,5 @@ __all__ = [
  # create_shortcut,
  "create_shortcut_onelake",
  "delete_shortcut",
+ "vacuum_lakehouse_tables",
  ]
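vacuum_lakehouse_tables is now exported from sempy_labs.lakehouse alongside optimize_lakehouse_tables; its implementation is part of the +63 -7 change to _lakehouse.py listed above but not shown in this excerpt. A hedged usage sketch, assuming its signature mirrors optimize_lakehouse_tables (lakehouse and workspace keyword arguments defaulting to the attached context):

    from sempy_labs.lakehouse import vacuum_lakehouse_tables

    # Vacuum the Delta tables of the attached lakehouse; both keyword
    # arguments are assumed here, defaulting to the current context.
    vacuum_lakehouse_tables(lakehouse=None, workspace=None)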