semantic-link-labs 0.8.0.tar.gz → 0.8.2.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (190)
  1. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/PKG-INFO +39 -7
  2. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/README.md +38 -6
  3. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/docs/requirements.txt +1 -0
  4. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/docs/source/conf.py +1 -1
  5. semantic_link_labs-0.8.2/notebooks/Tabular Object Model.ipynb +1 -0
  6. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/pyproject.toml +1 -1
  7. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/semantic_link_labs.egg-info/PKG-INFO +39 -7
  8. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/semantic_link_labs.egg-info/SOURCES.txt +11 -0
  9. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/__init__.py +70 -51
  10. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_ai.py +0 -2
  11. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_capacity_migration.py +1 -2
  12. semantic_link_labs-0.8.2/src/sempy_labs/_data_pipelines.py +118 -0
  13. semantic_link_labs-0.8.2/src/sempy_labs/_documentation.py +144 -0
  14. semantic_link_labs-0.8.2/src/sempy_labs/_eventhouses.py +118 -0
  15. semantic_link_labs-0.8.2/src/sempy_labs/_eventstreams.py +118 -0
  16. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_generate_semantic_model.py +3 -3
  17. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_git.py +3 -3
  18. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_helper_functions.py +117 -26
  19. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_icons.py +21 -0
  20. semantic_link_labs-0.8.2/src/sempy_labs/_kql_databases.py +134 -0
  21. semantic_link_labs-0.8.2/src/sempy_labs/_kql_querysets.py +124 -0
  22. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_list_functions.py +12 -425
  23. semantic_link_labs-0.8.2/src/sempy_labs/_mirrored_warehouses.py +50 -0
  24. semantic_link_labs-0.8.2/src/sempy_labs/_ml_experiments.py +122 -0
  25. semantic_link_labs-0.8.2/src/sempy_labs/_ml_models.py +120 -0
  26. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_model_auto_build.py +0 -4
  27. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_model_bpa.py +11 -11
  28. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_model_bpa_bulk.py +8 -7
  29. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_model_dependencies.py +26 -18
  30. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_notebooks.py +5 -16
  31. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_query_scale_out.py +2 -2
  32. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_refresh_semantic_model.py +7 -19
  33. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_spark.py +10 -10
  34. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_vertipaq.py +16 -18
  35. semantic_link_labs-0.8.2/src/sempy_labs/_warehouses.py +132 -0
  36. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_workspaces.py +0 -3
  37. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/admin/_basic_functions.py +92 -10
  38. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/admin/_domains.py +1 -1
  39. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/directlake/_directlake_schema_sync.py +1 -1
  40. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/directlake/_dl_helper.py +32 -16
  41. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/directlake/_guardrails.py +7 -7
  42. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/directlake/_update_directlake_partition_entity.py +1 -1
  43. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/directlake/_warm_cache.py +1 -1
  44. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/lakehouse/_get_lakehouse_tables.py +3 -3
  45. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/lakehouse/_lakehouse.py +3 -2
  46. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/migration/_migrate_calctables_to_lakehouse.py +5 -0
  47. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_generate_report.py +1 -1
  48. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_report_bpa.py +13 -3
  49. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_reportwrapper.py +14 -16
  50. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/tom/_model.py +261 -24
  51. semantic_link_labs-0.8.2/tests/test_friendly_case.py +11 -0
  52. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/tests/test_shortcuts.py +1 -1
  53. semantic_link_labs-0.8.0/notebooks/Tabular Object Model.ipynb +0 -1
  54. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
  55. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
  56. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/.github/ISSUE_TEMPLATE/issue--question---advice-needed.md +0 -0
  57. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/.github/workflows/build.yaml +0 -0
  58. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/.github/workflows/codeql.yaml +0 -0
  59. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/.gitignore +0 -0
  60. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/.readthedocs.yaml +0 -0
  61. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/.vscode/settings.json +0 -0
  62. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/CODE_OF_CONDUCT.md +0 -0
  63. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/LICENSE +0 -0
  64. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/SECURITY.md +0 -0
  65. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/SUPPORT.md +0 -0
  66. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/docs/Makefile +0 -0
  67. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/docs/make.bat +0 -0
  68. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/docs/source/index.rst +0 -0
  69. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/docs/source/modules.rst +0 -0
  70. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/environment.yml +0 -0
  71. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/notebooks/Best Practice Analyzer Report.ipynb +0 -0
  72. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/notebooks/Capacity Migration.ipynb +0 -0
  73. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/notebooks/Migration to Direct Lake.ipynb +0 -0
  74. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/notebooks/Model Optimization.ipynb +0 -0
  75. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/notebooks/Query Scale Out.ipynb +0 -0
  76. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/notebooks/Report Analysis.ipynb +0 -0
  77. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/notebooks/SQL.ipynb +0 -0
  78. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/notebooks/Semantic Model Refresh.ipynb +0 -0
  79. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/setup.cfg +0 -0
  80. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/semantic_link_labs.egg-info/dependency_links.txt +0 -0
  81. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/semantic_link_labs.egg-info/requires.txt +0 -0
  82. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/semantic_link_labs.egg-info/top_level.txt +0 -0
  83. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_am-ET.po +0 -0
  84. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +0 -0
  85. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +0 -0
  86. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +0 -0
  87. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +0 -0
  88. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_da-DK.po +0 -0
  89. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_de-DE.po +0 -0
  90. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_el-GR.po +0 -0
  91. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_es-ES.po +0 -0
  92. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +0 -0
  93. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +0 -0
  94. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +0 -0
  95. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +0 -0
  96. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_he-IL.po +0 -0
  97. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +0 -0
  98. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +0 -0
  99. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_id-ID.po +0 -0
  100. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_is-IS.po +0 -0
  101. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_it-IT.po +0 -0
  102. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +0 -0
  103. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +0 -0
  104. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +0 -0
  105. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +0 -0
  106. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +0 -0
  107. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +0 -0
  108. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +0 -0
  109. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +0 -0
  110. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +0 -0
  111. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +0 -0
  112. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +0 -0
  113. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +0 -0
  114. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +0 -0
  115. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_te-IN.po +0 -0
  116. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_th-TH.po +0 -0
  117. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +0 -0
  118. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +0 -0
  119. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +0 -0
  120. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +0 -0
  121. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_capacities.py +0 -0
  122. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_clear_cache.py +0 -0
  123. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_connections.py +0 -0
  124. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_dataflows.py +0 -0
  125. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_dax.py +0 -0
  126. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_deployment_pipelines.py +0 -0
  127. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_environments.py +0 -0
  128. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_model_bpa_rules.py +0 -0
  129. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_one_lake_integration.py +0 -0
  130. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_sql.py +0 -0
  131. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_translations.py +0 -0
  132. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/_workspace_identity.py +0 -0
  133. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/admin/__init__.py +0 -0
  134. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/directlake/__init__.py +0 -0
  135. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/directlake/_directlake_schema_compare.py +0 -0
  136. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/directlake/_generate_shared_expression.py +0 -0
  137. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/directlake/_get_directlake_lakehouse.py +0 -0
  138. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/directlake/_get_shared_expression.py +0 -0
  139. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/directlake/_list_directlake_model_calc_tables.py +0 -0
  140. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py +0 -0
  141. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +0 -0
  142. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/lakehouse/__init__.py +0 -0
  143. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/lakehouse/_get_lakehouse_columns.py +0 -0
  144. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/lakehouse/_shortcuts.py +0 -0
  145. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/migration/__init__.py +0 -0
  146. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/migration/_create_pqt_file.py +0 -0
  147. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/migration/_migrate_calctables_to_semantic_model.py +0 -0
  148. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +0 -0
  149. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +0 -0
  150. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/migration/_migration_validation.py +0 -0
  151. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/migration/_refresh_calc_tables.py +0 -0
  152. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_BPAReportTemplate.json +0 -0
  153. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/__init__.py +0 -0
  154. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +0 -0
  155. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/.platform +0 -0
  156. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +0 -0
  157. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +0 -0
  158. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +0 -0
  159. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +0 -0
  160. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +0 -0
  161. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +0 -0
  162. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +0 -0
  163. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +0 -0
  164. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +0 -0
  165. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +0 -0
  166. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +0 -0
  167. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +0 -0
  168. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +0 -0
  169. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +0 -0
  170. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +0 -0
  171. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +0 -0
  172. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +0 -0
  173. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +0 -0
  174. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +0 -0
  175. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +0 -0
  176. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +0 -0
  177. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +0 -0
  178. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +0 -0
  179. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +0 -0
  180. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/report.json +0 -0
  181. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition/version.json +0 -0
  182. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_bpareporttemplate/definition.pbir +0 -0
  183. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_report_bpa_rules.py +0 -0
  184. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_report_functions.py +0 -0
  185. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_report_helper.py +0 -0
  186. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_report_list_functions.py +0 -0
  187. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/report/_report_rebind.py +0 -0
  188. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/src/sempy_labs/tom/__init__.py +0 -0
  189. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/tests/__init__.py +0 -0
  190. {semantic_link_labs-0.8.0 → semantic_link_labs-0.8.2}/tests/test_tom.py +0 -0
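
Several of the new modules in this release (`_data_pipelines.py`, `_eventhouses.py`, `_eventstreams.py`, `_kql_databases.py`, `_kql_querysets.py`, `_mirrored_warehouses.py`, `_ml_experiments.py`, `_ml_models.py`, `_warehouses.py`) appear to follow the library's existing list/create/delete helper pattern. A minimal sketch of how such helpers would presumably be called from a Fabric notebook; the function names and signatures below are assumptions inferred from the module names, not confirmed by this diff, so verify them against the 0.8.2 documentation:

```python
# Assumption: 0.8.2 exposes list_* helpers matching the new modules above
# (e.g. _warehouses.py, _ml_models.py); names and parameters are unverified.
import sempy_labs as labs

workspace = None  # None resolves to the workspace the notebook runs in

warehouses = labs.list_warehouses(workspace=workspace)  # hypothetical helper
ml_models = labs.list_ml_models(workspace=workspace)    # hypothetical helper

print(warehouses.head())
print(ml_models.head())
```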
--- semantic_link_labs-0.8.0/PKG-INFO
+++ semantic_link_labs-0.8.2/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: semantic-link-labs
- Version: 0.8.0
+ Version: 0.8.2
  Summary: Semantic Link Labs for Microsoft Fabric
  Author: Microsoft Corporation
  License: MIT License
@@ -27,7 +27,7 @@ Requires-Dist: pytest>=8.2.1; extra == "test"
  # Semantic Link Labs

  [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
- [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.8.0&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.8.2&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
  [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)

@@ -35,9 +35,31 @@ Requires-Dist: pytest>=8.2.1; extra == "test"
  [Read the documentation on ReadTheDocs!](https://semantic-link-labs.readthedocs.io/en/stable/)
  ---

- This is a python library intended to be used in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library was originally intended to solely contain functions used for [migrating semantic models to Direct Lake mode](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration). However, it quickly became apparent that functions within such a library could support many other useful activities in the realm of semantic models, reports, lakehouses and really anything Fabric-related. As such, this library contains a variety of functions ranging from running [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_vertipaq_analyzer) or the [Best Practice Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa) against a semantic model to seeing if any [lakehouse tables hit Direct Lake guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables) or accessing the [Tabular Object Model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html) and more!
-
- Instructions for migrating import/DirectQuery semantic models to Direct Lake mode can be found [here](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration).
+ Semantic Link Labs is a Python library designed for use in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library extends the capabilities of [Semantic Link](https://learn.microsoft.com/fabric/data-science/semantic-link-overview) offering additional functionalities to seamlessly integrate and work alongside it. The goal of Semantic Link Labs is to simplify technical processes, empowering people to focus on higher level activities and allowing tasks that are better suited for machines to be efficiently handled without human intervention.
+
+ ## Featured Scenarios
+ * Semantic Models
+     * [Migrating an import/DirectQuery semantic model to Direct Lake](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration)
+     * [Model Best Practice Analyzer (BPA)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa)
+     * [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.vertipaq_analyzer)
+     * [Tabular Object Model](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Tabular%20Object%20Model.ipynb) [(TOM)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html)
+     * [Translate a semantic model's metadata](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.translate_semantic_model)
+     * [Check Direct Lake Guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables)
+     * [Refresh](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Semantic%20Model%20Refresh.ipynb), [clear cache](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.clear_cache), [backup](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.backup_semantic_model), [restore](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.restore_semantic_model), [copy backup files](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.copy_semantic_model_backup_file), [move/deploy across workspaces](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.deploy_semantic_model)
+     * [Run DAX queries which impersonate a user](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.evaluate_dax_impersonation)
+ * Reports
+     * [Report Best Practice Analyzer (BPA)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.run_report_bpa)
+     * [View report metadata](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Report%20Analysis.ipynb)
+     * [View semantic model objects most frequently used in Power BI reports](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_semantic_model_object_report_usage)
+     * [View broken reports](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_report_semantic_model_objects)
+     * [Rebind reports](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.report_rebind)
+ * Capacities
+     * [Migrating a Power BI Premium capacity (P sku) to a Fabric capacity (F sku)](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Capacity%20Migration.ipynb)
+ * APIs
+     * Wrapper functions for [Power BI](https://learn.microsoft.com/rest/api/power-bi/), [Fabric](https://learn.microsoft.com/rest/api/fabric/articles/using-fabric-apis), and [Azure](https://learn.microsoft.com/rest/api/azure/?view=rest-power-bi-embedded-2021-01-01) APIs
+
+
+ ### Check out the [helper notebooks](https://github.com/microsoft/semantic-link-labs/tree/main/notebooks) for getting started!

  If you encounter any issues, please [raise a bug](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=bug_report.md&title=).

@@ -81,7 +103,9 @@ An even better way to ensure the semantic-link-labs library is available in your
  2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook

  ## Version History
- * [0.8.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.0) (September 24, 2024)
+ * [0.8.2](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.2) (October 2, 2024)
+ * [0.8.1](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.1) (October 2, 2024)
+ * [0.8.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.0) (September 25, 2024)
  * [0.7.4](https://github.com/microsoft/semantic-link-labs/releases/tag/0.7.4) (September 16, 2024)
  * [0.7.3](https://github.com/microsoft/semantic-link-labs/releases/tag/0.7.3) (September 11, 2024)
  * [0.7.2](https://github.com/microsoft/semantic-link-labs/releases/tag/0.7.2) (August 30, 2024)
@@ -128,7 +152,7 @@ Check out my [blog post](https://www.elegantbi.com/post/direct-lake-migration) o
  5. Back in the notebook, the next step will create your new Direct Lake semantic model with the name of your choice, taking all the relevant properties from the orignal semantic model and refreshing/framing your new semantic model.

  > [!NOTE]
- > As of version 0.2.1, calculated tables are also migrated to Direct Lake (as data tables with their DAX expression stored as model annotations in the new semantic model). Additionally, Field Parameters are migrated as they were in the original semantic model (as a calculated table).
+ > Calculated tables are also migrated to Direct Lake (as data tables with their DAX expression stored as model annotations in the new semantic model). Additionally, Field Parameters are migrated as they were in the original semantic model (as a calculated table). [Auto date/time tables](https://learn.microsoft.com/power-bi/guidance/auto-date-time) are not migrated. Auto date/time must be disabled in Power BI Desktop and proper date table(s) must be created prior to migration.

  6. Finally, you can easily rebind your all reports which use the import/DQ semantic model to the new Direct Lake semantic model in one click.

@@ -141,6 +165,14 @@ Check out my [blog post](https://www.elegantbi.com/post/direct-lake-migration) o
  * Non-supported objects are not transferred (i.e. calculated columns, relationships using columns with unsupported data types etc.).
  * Reports used by your original semantic model will be rebinded to your new semantic model.

+ ### Limitations
+ * Calculated columns are not migrated.
+ * Auto date/time tables are not migrated.
+ * References to calculated columns in Field Parameters are removed.
+ * References to calculated columns in measure expressions or other DAX expressions will break.
+ * Calculated tables are migrated as possible. The success of this migration depends on the interdependencies and complexity of the calculated table. This part of the migration is a workaround as technically calculated tables are not supported in Direct Lake.
+ * See [here](https://learn.microsoft.com/fabric/get-started/direct-lake-overview#considerations-and-limitations) for the rest of the limitations of Direct Lake.
+
  ## Contributing

  This project welcomes contributions and suggestions. Most contributions require you to agree to a
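
The "Featured Scenarios" section added to the README above links to several `sempy_labs` entry points (`run_model_bpa`, `vertipaq_analyzer`, `clear_cache`, and others). A minimal sketch of how they might be called from a Fabric notebook; the function names come from the doc links in the diff, while the dataset name and keyword usage are placeholders and assumptions:

```python
# Sketch based on the doc links in the README diff above; the dataset and
# workspace values are placeholders, and keyword names are assumptions.
import sempy_labs as labs

dataset = "AdventureWorks"  # placeholder semantic model name
workspace = None            # None = the notebook's current workspace

# Run the model Best Practice Analyzer and the Vertipaq Analyzer
labs.run_model_bpa(dataset=dataset, workspace=workspace)
labs.vertipaq_analyzer(dataset=dataset, workspace=workspace)

# Clear the semantic model's cache
labs.clear_cache(dataset=dataset, workspace=workspace)
```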
--- semantic_link_labs-0.8.0/README.md
+++ semantic_link_labs-0.8.2/README.md
@@ -1,7 +1,7 @@
  # Semantic Link Labs

  [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
- [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.8.0&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.8.2&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
  [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)

@@ -9,9 +9,31 @@
  [Read the documentation on ReadTheDocs!](https://semantic-link-labs.readthedocs.io/en/stable/)
  ---

- This is a python library intended to be used in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library was originally intended to solely contain functions used for [migrating semantic models to Direct Lake mode](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration). However, it quickly became apparent that functions within such a library could support many other useful activities in the realm of semantic models, reports, lakehouses and really anything Fabric-related. As such, this library contains a variety of functions ranging from running [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_vertipaq_analyzer) or the [Best Practice Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa) against a semantic model to seeing if any [lakehouse tables hit Direct Lake guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables) or accessing the [Tabular Object Model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html) and more!
-
- Instructions for migrating import/DirectQuery semantic models to Direct Lake mode can be found [here](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration).
+ Semantic Link Labs is a Python library designed for use in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library extends the capabilities of [Semantic Link](https://learn.microsoft.com/fabric/data-science/semantic-link-overview) offering additional functionalities to seamlessly integrate and work alongside it. The goal of Semantic Link Labs is to simplify technical processes, empowering people to focus on higher level activities and allowing tasks that are better suited for machines to be efficiently handled without human intervention.
+
+ ## Featured Scenarios
+ * Semantic Models
+     * [Migrating an import/DirectQuery semantic model to Direct Lake](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration)
+     * [Model Best Practice Analyzer (BPA)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa)
+     * [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.vertipaq_analyzer)
+     * [Tabular Object Model](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Tabular%20Object%20Model.ipynb) [(TOM)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html)
+     * [Translate a semantic model's metadata](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.translate_semantic_model)
+     * [Check Direct Lake Guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables)
+     * [Refresh](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Semantic%20Model%20Refresh.ipynb), [clear cache](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.clear_cache), [backup](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.backup_semantic_model), [restore](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.restore_semantic_model), [copy backup files](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.copy_semantic_model_backup_file), [move/deploy across workspaces](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.deploy_semantic_model)
+     * [Run DAX queries which impersonate a user](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.evaluate_dax_impersonation)
+ * Reports
+     * [Report Best Practice Analyzer (BPA)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.run_report_bpa)
+     * [View report metadata](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Report%20Analysis.ipynb)
+     * [View semantic model objects most frequently used in Power BI reports](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_semantic_model_object_report_usage)
+     * [View broken reports](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_report_semantic_model_objects)
+     * [Rebind reports](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.report_rebind)
+ * Capacities
+     * [Migrating a Power BI Premium capacity (P sku) to a Fabric capacity (F sku)](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Capacity%20Migration.ipynb)
+ * APIs
+     * Wrapper functions for [Power BI](https://learn.microsoft.com/rest/api/power-bi/), [Fabric](https://learn.microsoft.com/rest/api/fabric/articles/using-fabric-apis), and [Azure](https://learn.microsoft.com/rest/api/azure/?view=rest-power-bi-embedded-2021-01-01) APIs
+
+
+ ### Check out the [helper notebooks](https://github.com/microsoft/semantic-link-labs/tree/main/notebooks) for getting started!

  If you encounter any issues, please [raise a bug](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=bug_report.md&title=).

@@ -55,7 +77,9 @@ An even better way to ensure the semantic-link-labs library is available in your
  2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook

  ## Version History
- * [0.8.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.0) (September 24, 2024)
+ * [0.8.2](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.2) (October 2, 2024)
+ * [0.8.1](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.1) (October 2, 2024)
+ * [0.8.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.0) (September 25, 2024)
  * [0.7.4](https://github.com/microsoft/semantic-link-labs/releases/tag/0.7.4) (September 16, 2024)
  * [0.7.3](https://github.com/microsoft/semantic-link-labs/releases/tag/0.7.3) (September 11, 2024)
  * [0.7.2](https://github.com/microsoft/semantic-link-labs/releases/tag/0.7.2) (August 30, 2024)
@@ -102,7 +126,7 @@ Check out my [blog post](https://www.elegantbi.com/post/direct-lake-migration) o
  5. Back in the notebook, the next step will create your new Direct Lake semantic model with the name of your choice, taking all the relevant properties from the orignal semantic model and refreshing/framing your new semantic model.

  > [!NOTE]
- > As of version 0.2.1, calculated tables are also migrated to Direct Lake (as data tables with their DAX expression stored as model annotations in the new semantic model). Additionally, Field Parameters are migrated as they were in the original semantic model (as a calculated table).
+ > Calculated tables are also migrated to Direct Lake (as data tables with their DAX expression stored as model annotations in the new semantic model). Additionally, Field Parameters are migrated as they were in the original semantic model (as a calculated table). [Auto date/time tables](https://learn.microsoft.com/power-bi/guidance/auto-date-time) are not migrated. Auto date/time must be disabled in Power BI Desktop and proper date table(s) must be created prior to migration.

  6. Finally, you can easily rebind your all reports which use the import/DQ semantic model to the new Direct Lake semantic model in one click.

@@ -115,6 +139,14 @@ Check out my [blog post](https://www.elegantbi.com/post/direct-lake-migration) o
  * Non-supported objects are not transferred (i.e. calculated columns, relationships using columns with unsupported data types etc.).
  * Reports used by your original semantic model will be rebinded to your new semantic model.

+ ### Limitations
+ * Calculated columns are not migrated.
+ * Auto date/time tables are not migrated.
+ * References to calculated columns in Field Parameters are removed.
+ * References to calculated columns in measure expressions or other DAX expressions will break.
+ * Calculated tables are migrated as possible. The success of this migration depends on the interdependencies and complexity of the calculated table. This part of the migration is a workaround as technically calculated tables are not supported in Direct Lake.
+ * See [here](https://learn.microsoft.com/fabric/get-started/direct-lake-overview#considerations-and-limitations) for the rest of the limitations of Direct Lake.
+
  ## Contributing

  This project welcomes contributions and suggestions. Most contributions require you to agree to a
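
The report-focused scenarios in the README (report BPA, rebinding reports) map to functions in `sempy_labs.report` referenced by the doc links above. A hedged sketch; the report and model names are placeholders, and the parameter names are assumptions inferred from those documentation links:

```python
# Sketch only: run_report_bpa and report_rebind are named in the doc links
# above; report/dataset values are placeholders and parameters are unverified.
from sempy_labs import report as rep

# Run the report Best Practice Analyzer against a report
rep.run_report_bpa(report="Sales Report", workspace=None)

# Rebind a report to a different semantic model (e.g. after a Direct Lake migration)
rep.report_rebind(
    report="Sales Report",
    dataset="Sales Model Direct Lake",
    report_workspace=None,
    dataset_workspace=None,
)
```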
--- semantic_link_labs-0.8.0/docs/requirements.txt
+++ semantic_link_labs-0.8.2/docs/requirements.txt
@@ -10,5 +10,6 @@ azure-storage-blob>=12.9.0
  anytree
  IPython
  polib
+ powerbiclient
  azure.mgmt.resource
  jsonpath_ng
--- semantic_link_labs-0.8.0/docs/source/conf.py
+++ semantic_link_labs-0.8.2/docs/source/conf.py
@@ -13,7 +13,7 @@ sys.path.insert(0, os.path.abspath('../../src/'))
  project = 'semantic-link-labs'
  copyright = '2024, Microsoft and community'
  author = 'Microsoft and community'
- release = '0.8.0'
+ release = '0.8.2'

  # -- General configuration ---------------------------------------------------
  # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
--- /dev/null
+++ semantic_link_labs-0.8.2/notebooks/Tabular Object Model.ipynb
@@ -0,0 +1 @@
+ {"cells":[{"cell_type":"markdown","id":"5c27dfd1-4fe0-4a97-92e6-ddf78889aa93","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Install the latest .whl package\n","\n","Check [here](https://pypi.org/project/semantic-link-labs/) to see the latest version."]},{"cell_type":"code","execution_count":null,"id":"d5cae9db-cef9-48a8-a351-9c5fcc99645c","metadata":{"jupyter":{"outputs_hidden":true,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["%pip install semantic-link-labs"]},{"cell_type":"markdown","id":"5a3fe6e8-b8aa-4447-812b-7931831e07fe","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Connect to the [Tabular Object Model](https://learn.microsoft.com/analysis-services/tom/introduction-to-the-tabular-object-model-tom-in-analysis-services-amo?view=asallproducts-allversions) ([TOM](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.model?view=analysisservices-dotnet))\n","Setting the 'readonly' property to False enables read/write mode. This allows changes to be made to the semantic model."]},{"cell_type":"code","execution_count":null,"id":"cde43b47-4ecc-46ae-9125-9674819c7eab","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["import sempy_labs as labs\n","from sempy_labs.tom import connect_semantic_model\n","\n","dataset = '' # Enter dataset name\n","workspace = None # Enter workspace name\n","\n","with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," print(t.Name)"]},{"cell_type":"markdown","id":"fc6b277e","metadata":{},"source":["### Make changes to a semantic model using custom functions\n","Note that the custom functions have additional optional parameters (which may not be used in the examples below) for adding properties to model objects. 
Check the [documentation](https://semantic-link-labs.readthedocs.io/en/0.5.0/sempy_labs.tom.html) to see all available parameters for each function."]},{"cell_type":"markdown","id":"6d46d878","metadata":{},"source":["#### Rename objects in the semantic model"]},{"cell_type":"code","execution_count":null,"id":"1284825a","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," t.Name = t.Name.replace('_',' ')\n"]},{"cell_type":"code","execution_count":null,"id":"d3b60303","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," c.Name = c.Name.replace('_',' ')"]},{"cell_type":"markdown","id":"402a477c","metadata":{},"source":["#### Add measure(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"bdaaaa5c","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_measure(table_name='Internet Sales', measure_name='Sales Amount', expression=\"SUM('Internet Sales'[SalesAmount])\")\n"," tom.add_measure(table_name='Internet Sales', measure_name='Order Quantity', expression=\"SUM('Internet Sales'[OrderQty])\") "]},{"cell_type":"code","execution_count":null,"id":"a53a544b","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Internet Sales':\n"," tom.add_measure(table_name=t.Name, measure_name='Sales Amount', expression=\"SUM('Internet Sales'[SalesAmount])\")\n"," tom.add_measure(table_name=t.Name, measure_name='Order Quantity', expression=\"SUM('Internet Sales'[OrderQty])\")"]},{"cell_type":"markdown","id":"1cb1632f","metadata":{},"source":["#### Add column(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"81a22749","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_data_column(table_name='Product', column_name='Size Range', source_column='SizeRange', data_type='Int64')\n"," tom.add_data_column(table_name= 'Segment', column_name='Summary Segment', source_column='SummarySegment', data_type='String')\n","\n"," tom.add_calculated_column(table_name='Internet Sales', column_name='GrossMargin', expression=\"'Internet Sales'[SalesAmount] - 'Internet Sales'[ProductCost]\", data_type='Decimal')"]},{"cell_type":"code","execution_count":null,"id":"053b6516","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.add_data_column(table_name=t.Name, column_name='Size Range', source_column='SizeRange', data_type='Int64')\n"," elif t.Name == 'Segment':\n"," tom.add_data_column(table_name = t.Name, column_name='Summary Segment', source_column='SummarySegment', data_type='String')\n"," elif t.Name == 'Internet Sales':\n"," tom.add_calculated_column(table_name=t.Name, column_name='GrossMargin', expression=\"'Internet Sales'[SalesAmount] - 'Internet Sales'[ProductCost]\", data_type='Decimal')"]},{"cell_type":"markdown","id":"f53dcca7","metadata":{},"source":["#### Add hierarchies to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"a9309e23","metadata":{},"outputs":[],"source":["with 
connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_hierarchy(table_name='Geography', hierarchy_name='Geo Hierarchy', levels=['Continent', 'Country', 'State', 'City'])"]},{"cell_type":"code","execution_count":null,"id":"a04281ce","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Geography':\n"," tom.add_hierarchy(table_name=t.Name, hierarchy_name='Geo Hierarchy', levels=['Continent', 'Country', 'State', 'City'])"]},{"cell_type":"markdown","id":"47c06a4f","metadata":{},"source":["#### Add relationship(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"e8cd7bbf","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_relationship(\n"," from_table='Internet Sales', from_column='ProductKey',\n"," to_table='Product', to_column ='ProductKey', \n"," from_cardinality='Many', to_cardinality='One')"]},{"cell_type":"markdown","id":"3cc7f11e","metadata":{},"source":["#### Add a table with an M partition to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"0f5dd66a","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name='Sales'\n"," tom.add_table(name=table_name)\n"," tom.add_m_partition(table_name=table_name, partition_name=table_name, expression='let....')"]},{"cell_type":"markdown","id":"ea389123","metadata":{},"source":["#### Add a table with an entity partition to a Direct Lake semantic model "]},{"cell_type":"code","execution_count":null,"id":"f75387d1","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_table(name=table_name)\n"," tom.add_entity_partition(table_name=table_name, entity_name=table_name)"]},{"cell_type":"markdown","id":"e74d0f54","metadata":{},"source":["#### Add a calculated table (and columns) to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"934f7315","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_calculated_table(name=table_name, expression=\"DISTINCT('Product'[Color])\")\n"," tom.add_calculated_table_column(table_name=table_name, column_name='Color', source_column=\"'Product[Color]\", data_type='String')"]},{"cell_type":"markdown","id":"0e7088b7","metadata":{},"source":["#### Add role(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"ad60ebb9","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_role(role_name='Reader')"]},{"cell_type":"markdown","id":"c541f81a","metadata":{},"source":["#### Set row level security (RLS) to the semantic model\n","This adds row level security (or updates it if it already exists)"]},{"cell_type":"code","execution_count":null,"id":"98603a08","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_rls(\n"," role_name='Reader', \n"," table_name='Product',\n"," filter_expression=\"'Dim Product'[Color] = \\\"Blue\\\"\"\n"," 
)"]},{"cell_type":"code","execution_count":null,"id":"effea009","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for r in tom.model.Roles:\n"," if r.Name == 'Reader':\n"," tom.set_rls(role_name=r.Name, table_name='Product', filter_expression=\"'Dim Product'[Color] = \\\"Blue\\\"\")"]},{"cell_type":"markdown","id":"7fa7a03c","metadata":{},"source":["#### Set object level security (OLS) to the semantic model\n","This adds row level security (or updates it if it already exists)"]},{"cell_type":"code","execution_count":null,"id":"dd0def9d","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_ols(role_name='Reader', table_name='Product', column_name='Size', permission='None')"]},{"cell_type":"code","execution_count":null,"id":"7a389dc7","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for r in tom.model.Roles:\n"," if r.Name == 'Reader':\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.set_ols(role_name=r.Name, table_name=t.Name, column_name='Size', permission='None')"]},{"cell_type":"markdown","id":"d0f7ccd1","metadata":{},"source":["#### Add calculation groups and calculation items to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"97f4708b","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_calculation_group(name='MyCalcGroup')"]},{"cell_type":"code","execution_count":null,"id":"fef68832","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_calculation_item(table_name='MyCalcGroup', calculation_item_name='YTD', expression=\"CALCULATE(SELECTEDMEASURE(), DATESYTD('Calendar'[CalendarDate]))\")\n"," tom.add_calculation_item(table_name='MyCalcGroup', calculation_item_name='MTD', expression=\"CALCULATE(SELECTEDMEASURE(), DATESMTD('Calendar'[CalendarDate]))\")"]},{"cell_type":"code","execution_count":null,"id":"c7653dcc","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'MyCalcGroup':\n"," tom.add_calculation_item(table_name=t.Name, calculation_item_name='YTD', expression=\"CALCULATE(SELECTEDMEASURE(), DATESYTD('Calendar'[CalendarDate]))\")\n"," tom.add_calculation_item(table_name=t.Name, calculation_item_name='MTD', expression=\"CALCULATE(SELECTEDMEASURE(), DATESMTD('Calendar'[CalendarDate]))\")"]},{"cell_type":"markdown","id":"c6450c74","metadata":{},"source":["#### Add translations to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"2b616b90","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_translation(language='it-IT')"]},{"cell_type":"code","execution_count":null,"id":"dc24c200","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_translation(object = tom.model.Tables['Product'], language='it-IT', property='Name', value='Produtto')"]},{"cell_type":"markdown","id":"3048cc95","metadata":{},"source":["#### Add a [Field 
Parameter](https://learn.microsoft.com/power-bi/create-reports/power-bi-field-parameters) to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"0a94af94","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_field_parameter(table_name='Parameter', objects=\"'Product'[Color], [Sales Amount], 'Geography'[Country]\")"]},{"cell_type":"markdown","id":"95aac09a","metadata":{},"source":["#### Remove an object(s) from a semantic model"]},{"cell_type":"code","execution_count":null,"id":"1e2572a8","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.remove_object(object=t.Columns['Size'])\n"," tom.remove_object(object=t.Hierarchies['Product Hierarchy'])"]},{"cell_type":"code","execution_count":null,"id":"bc453177","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.remove_object(object=tom.model.Tables['Product'].Columns['Size'])\n"," tom.remove_object(object=tom.model.Tables['Product'].Hierarchies['Product Hierarchy'])"]},{"cell_type":"markdown","id":"e0d0cb9e","metadata":{},"source":["### Custom functions to loop through non-top-level objects in a semantic model"]},{"cell_type":"code","execution_count":null,"id":"cbe3b1a3","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," print(c.Name)"]},{"cell_type":"code","execution_count":null,"id":"3f643e66","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for m in tom.all_measures():\n"," print(m.Name)"]},{"cell_type":"code","execution_count":null,"id":"ed1cde0f","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for p in tom.all_partitions():\n"," print(p.Name)"]},{"cell_type":"code","execution_count":null,"id":"f48014ae","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for h in tom.all_hierarchies():\n"," print(h.Name)"]},{"cell_type":"code","execution_count":null,"id":"9f5e7b72","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for ci in tom.all_calculation_items():\n"," print(ci.Name)"]},{"cell_type":"code","execution_count":null,"id":"3cd9ebc1","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for l in tom.all_levels():\n"," print(l.Name)"]},{"cell_type":"code","execution_count":null,"id":"12c58bad","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for rls in tom.all_rls():\n"," print(rls.Name)"]},{"cell_type":"markdown","id":"1a294bd2","metadata":{},"source":["### See Vertipaq Analyzer stats"]},{"cell_type":"code","execution_count":null,"id":"469660e9","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_vertipaq_annotations()\n","\n"," for t in tom.model.Tables:\n"," rc = tom.row_count(object = t)\n"," print(f\"{t.Name} : {str(rc)}\")\n"," 
for c in t.Columns:\n"," col_size = tom.total_size(object=c)\n"," print(labs.format_dax_object_name(t.Name, c.Name) + ' : ' + str(col_size))"]},{"cell_type":"markdown","id":"1ab26dfd","metadata":{},"source":["### 'UsedIn' functions"]},{"cell_type":"code","execution_count":null,"id":"412bf287","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," full_name = labs.format_dax_object_name(c.Parent.Name, c.Name)\n"," for h in tom.used_in_hierarchies(column = c):\n"," print(f\"{full_name} : {h.Name}\")"]},{"cell_type":"code","execution_count":null,"id":"76556900","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," full_name = labs.format_dax_object_name(c.Parent.Name, c.Name)\n"," for r in tom.used_in_relationships(object = c):\n"," rel_name = labs.create_relationship_name(r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name)\n"," print(f\"{full_name} : {rel_name}\")"]},{"cell_type":"code","execution_count":null,"id":"4d9ec24e","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," for r in tom.used_in_relationships(object = t):\n"," rel_name = labs.create_relationship_name(r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name)\n"," print(f\"{t.Name} : {rel_name}\")"]},{"cell_type":"code","execution_count":null,"id":"82251336","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," dep = labs.get_model_calc_dependencies(dataset = dataset, workspace=workspace)\n"," for o in tom.used_in_rls(object = tom.model.Tables['Product'].Columns['Color'], dependencies=dep):\n"," print(o.Name)"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Synapse PySpark","language":"Python","name":"synapse_pyspark"},"language_info":{"name":"python"},"microsoft":{"language":"python"},"nteract":{"version":"nteract-front-end@1.0.0"},"spark_compute":{"compute_id":"/trident/default"},"synapse_widget":{"state":{},"version":"0.1"},"widgets":{}},"nbformat":4,"nbformat_minor":5}
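The notebook above relies throughout on the `connect_semantic_model` context manager from `sempy_labs.tom`. As a minimal standalone sketch of that pattern (the dataset and workspace names are placeholders, not values from this package):

```python
import sempy_labs as labs
from sempy_labs.tom import connect_semantic_model

dataset = "My Dataset"      # placeholder name
workspace = "My Workspace"  # placeholder name

# Read-only connection: enumerate every measure with its fully qualified name,
# mirroring the all_measures() loop shown in the notebook cells above.
with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:
    for m in tom.all_measures():
        print(labs.format_dax_object_name(m.Parent.Name, m.Name))
```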
@@ -7,7 +7,7 @@ name="semantic-link-labs"
  authors = [
  { name = "Microsoft Corporation" },
  ]
- version="0.8.0"
+ version="0.8.2"
  description="Semantic Link Labs for Microsoft Fabric"
  readme="README.md"
  requires-python=">=3.10,<3.12"
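The hunk above bumps the published version to 0.8.2. In a Fabric notebook the pinned release would typically be installed inline before importing the library; the exact pin below is illustrative:

```python
# Notebook cell: install the pinned release described by the hunk above.
# '%pip' is the standard notebook magic in Fabric/Jupyter environments.
%pip install semantic-link-labs==0.8.2

# Then, in a subsequent cell, import the library as usual.
import sempy_labs as labs
```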
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: semantic-link-labs
- Version: 0.8.0
+ Version: 0.8.2
  Summary: Semantic Link Labs for Microsoft Fabric
  Author: Microsoft Corporation
  License: MIT License
@@ -27,7 +27,7 @@ Requires-Dist: pytest>=8.2.1; extra == "test"
  # Semantic Link Labs

  [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
- [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.8.0&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.8.2&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
  [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)

@@ -35,9 +35,31 @@ Requires-Dist: pytest>=8.2.1; extra == "test"
  [Read the documentation on ReadTheDocs!](https://semantic-link-labs.readthedocs.io/en/stable/)
  ---

- This is a python library intended to be used in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library was originally intended to solely contain functions used for [migrating semantic models to Direct Lake mode](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration). However, it quickly became apparent that functions within such a library could support many other useful activities in the realm of semantic models, reports, lakehouses and really anything Fabric-related. As such, this library contains a variety of functions ranging from running [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_vertipaq_analyzer) or the [Best Practice Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa) against a semantic model to seeing if any [lakehouse tables hit Direct Lake guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables) or accessing the [Tabular Object Model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html) and more!
-
- Instructions for migrating import/DirectQuery semantic models to Direct Lake mode can be found [here](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration).
+ Semantic Link Labs is a Python library designed for use in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library extends the capabilities of [Semantic Link](https://learn.microsoft.com/fabric/data-science/semantic-link-overview), offering additional functionality that integrates and works seamlessly alongside it. The goal of Semantic Link Labs is to simplify technical processes, empowering people to focus on higher-level activities and allowing tasks better suited for machines to be handled efficiently without human intervention.
+
+ ## Featured Scenarios
+ * Semantic Models
+   * [Migrating an import/DirectQuery semantic model to Direct Lake](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration)
+   * [Model Best Practice Analyzer (BPA)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa)
+   * [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.vertipaq_analyzer)
+   * [Tabular Object Model](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Tabular%20Object%20Model.ipynb) [(TOM)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html)
+   * [Translate a semantic model's metadata](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.translate_semantic_model)
+   * [Check Direct Lake Guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables)
+   * [Refresh](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Semantic%20Model%20Refresh.ipynb), [clear cache](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.clear_cache), [backup](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.backup_semantic_model), [restore](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.restore_semantic_model), [copy backup files](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.copy_semantic_model_backup_file), [move/deploy across workspaces](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.deploy_semantic_model)
+   * [Run DAX queries which impersonate a user](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.evaluate_dax_impersonation)
+ * Reports
+   * [Report Best Practice Analyzer (BPA)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.run_report_bpa)
+   * [View report metadata](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Report%20Analysis.ipynb)
+   * [View semantic model objects most frequently used in Power BI reports](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_semantic_model_object_report_usage)
+   * [View broken reports](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_report_semantic_model_objects)
+   * [Rebind reports](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.report_rebind)
+ * Capacities
+   * [Migrating a Power BI Premium capacity (P sku) to a Fabric capacity (F sku)](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Capacity%20Migration.ipynb)
+ * APIs
+   * Wrapper functions for [Power BI](https://learn.microsoft.com/rest/api/power-bi/), [Fabric](https://learn.microsoft.com/rest/api/fabric/articles/using-fabric-apis), and [Azure](https://learn.microsoft.com/rest/api/azure/?view=rest-power-bi-embedded-2021-01-01) APIs
+
+
+ ### Check out the [helper notebooks](https://github.com/microsoft/semantic-link-labs/tree/main/notebooks) for getting started!

  If you encounter any issues, please [raise a bug](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=bug_report.md&title=).

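The Featured Scenarios list added above links to functions such as `run_model_bpa`, `vertipaq_analyzer`, and `get_lakehouse_tables`. A hedged sketch of what those calls look like from a notebook (names are placeholders; only the `dataset`/`workspace`/`lakehouse` arguments from the linked documentation are shown):

```python
import sempy_labs as labs
from sempy_labs import lakehouse

dataset = "My Dataset"      # placeholder
workspace = "My Workspace"  # placeholder

# Best Practice Analyzer scan of a semantic model.
labs.run_model_bpa(dataset=dataset, workspace=workspace)

# Vertipaq Analyzer statistics for the same model.
labs.vertipaq_analyzer(dataset=dataset, workspace=workspace)

# Direct Lake guardrail check: list the lakehouse tables backing the model.
# The 'lakehouse' argument name is assumed from the documentation link above.
tables = lakehouse.get_lakehouse_tables(lakehouse="MyLakehouse", workspace=workspace)
print(tables)
```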
@@ -81,7 +103,9 @@ An even better way to ensure the semantic-link-labs library is available in your
  2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook

  ## Version History
- * [0.8.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.0) (September 24, 2024)
+ * [0.8.2](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.2) (October 2, 2024)
+ * [0.8.1](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.1) (October 2, 2024)
+ * [0.8.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.0) (September 25, 2024)
  * [0.7.4](https://github.com/microsoft/semantic-link-labs/releases/tag/0.7.4) (September 16, 2024)
  * [0.7.3](https://github.com/microsoft/semantic-link-labs/releases/tag/0.7.3) (September 11, 2024)
  * [0.7.2](https://github.com/microsoft/semantic-link-labs/releases/tag/0.7.2) (August 30, 2024)
@@ -128,7 +152,7 @@ Check out my [blog post](https://www.elegantbi.com/post/direct-lake-migration) o
  5. Back in the notebook, the next step will create your new Direct Lake semantic model with the name of your choice, taking all the relevant properties from the original semantic model and refreshing/framing your new semantic model.

  > [!NOTE]
- > As of version 0.2.1, calculated tables are also migrated to Direct Lake (as data tables with their DAX expression stored as model annotations in the new semantic model). Additionally, Field Parameters are migrated as they were in the original semantic model (as a calculated table).
+ > Calculated tables are also migrated to Direct Lake (as data tables with their DAX expression stored as model annotations in the new semantic model). Additionally, Field Parameters are migrated as they were in the original semantic model (as a calculated table). [Auto date/time tables](https://learn.microsoft.com/power-bi/guidance/auto-date-time) are not migrated. Auto date/time must be disabled in Power BI Desktop and proper date table(s) must be created prior to migration.

  6. Finally, you can easily rebind all reports that use the import/DQ semantic model to the new Direct Lake semantic model in one click.

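Step 6 above refers to rebinding reports to the new Direct Lake semantic model, which the scenario list maps to `sempy_labs.report.report_rebind`. A hedged sketch of that call (report/dataset/workspace names are placeholders and the keyword argument names are assumptions, not verified against 0.8.2):

```python
from sempy_labs import report

# Point an existing report at the new Direct Lake semantic model.
report.report_rebind(
    report="Sales Report",              # placeholder report name
    dataset="Sales Direct Lake",        # placeholder target model name
    report_workspace="My Workspace",    # assumed keyword
    dataset_workspace="My Workspace",   # assumed keyword
)
```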
@@ -141,6 +165,14 @@ Check out my [blog post](https://www.elegantbi.com/post/direct-lake-migration) o
  * Unsupported objects are not transferred (e.g. calculated columns, relationships using columns with unsupported data types, etc.).
  * Reports used by your original semantic model will be rebound to your new semantic model.

+ ### Limitations
+ * Calculated columns are not migrated.
+ * Auto date/time tables are not migrated.
+ * References to calculated columns in Field Parameters are removed.
+ * References to calculated columns in measure expressions or other DAX expressions will break.
+ * Calculated tables are migrated where possible. The success of this migration depends on the interdependencies and complexity of the calculated table. This part of the migration is a workaround, as calculated tables are technically not supported in Direct Lake.
+ * See [here](https://learn.microsoft.com/fabric/get-started/direct-lake-overview#considerations-and-limitations) for the rest of the limitations of Direct Lake.
+
  ## Contributing

  This project welcomes contributions and suggestions. Most contributions require you to agree to a
@@ -39,15 +39,24 @@ src/sempy_labs/_capacities.py
  src/sempy_labs/_capacity_migration.py
  src/sempy_labs/_clear_cache.py
  src/sempy_labs/_connections.py
+ src/sempy_labs/_data_pipelines.py
  src/sempy_labs/_dataflows.py
  src/sempy_labs/_dax.py
  src/sempy_labs/_deployment_pipelines.py
+ src/sempy_labs/_documentation.py
  src/sempy_labs/_environments.py
+ src/sempy_labs/_eventhouses.py
+ src/sempy_labs/_eventstreams.py
  src/sempy_labs/_generate_semantic_model.py
  src/sempy_labs/_git.py
  src/sempy_labs/_helper_functions.py
  src/sempy_labs/_icons.py
+ src/sempy_labs/_kql_databases.py
+ src/sempy_labs/_kql_querysets.py
  src/sempy_labs/_list_functions.py
+ src/sempy_labs/_mirrored_warehouses.py
+ src/sempy_labs/_ml_experiments.py
+ src/sempy_labs/_ml_models.py
  src/sempy_labs/_model_auto_build.py
  src/sempy_labs/_model_bpa.py
  src/sempy_labs/_model_bpa_bulk.py
@@ -61,6 +70,7 @@ src/sempy_labs/_spark.py
  src/sempy_labs/_sql.py
  src/sempy_labs/_translations.py
  src/sempy_labs/_vertipaq.py
+ src/sempy_labs/_warehouses.py
  src/sempy_labs/_workspace_identity.py
  src/sempy_labs/_workspaces.py
  src/sempy_labs/_bpa_translation/_model/_translations_am-ET.po
@@ -172,5 +182,6 @@ src/sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/v
  src/sempy_labs/tom/__init__.py
  src/sempy_labs/tom/_model.py
  tests/__init__.py
+ tests/test_friendly_case.py
  tests/test_shortcuts.py
  tests/test_tom.py
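The new modules added to SOURCES.txt above (`_data_pipelines`, `_eventhouses`, `_kql_databases`, and so on) each expose a `list_*` helper, as the `__init__.py` diff below confirms. A hedged sketch of calling a few of them (assuming each accepts an optional `workspace` and returns a pandas DataFrame, which is the library's usual pattern):

```python
import sempy_labs as labs

workspace = "My Workspace"  # placeholder

# Enumerate the new Fabric item types surfaced in 0.8.2; each call is assumed
# to return a DataFrame of the items in the given workspace.
print(labs.list_data_pipelines(workspace=workspace))
print(labs.list_eventhouses(workspace=workspace))
print(labs.list_kql_databases(workspace=workspace))
```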
@@ -1,3 +1,44 @@
+ from sempy_labs._ml_models import (
+     list_ml_models,
+     create_ml_model,
+     delete_ml_model,
+ )
+ from sempy_labs._ml_experiments import (
+     list_ml_experiments,
+     create_ml_experiment,
+     delete_ml_experiment,
+ )
+ from sempy_labs._warehouses import (
+     create_warehouse,
+     list_warehouses,
+     delete_warehouse,
+ )
+ from sempy_labs._data_pipelines import (
+     list_data_pipelines,
+     create_data_pipeline,
+     delete_data_pipeline,
+ )
+ from sempy_labs._eventhouses import (
+     create_eventhouse,
+     list_eventhouses,
+     delete_eventhouse,
+ )
+ from sempy_labs._eventstreams import (
+     list_eventstreams,
+     create_eventstream,
+     delete_eventstream,
+ )
+ from sempy_labs._kql_querysets import (
+     list_kql_querysets,
+     create_kql_queryset,
+     delete_kql_queryset,
+ )
+ from sempy_labs._kql_databases import (
+     list_kql_databases,
+     create_kql_database,
+     delete_kql_database,
+ )
+ from sempy_labs._mirrored_warehouses import list_mirrored_warehouses
  from sempy_labs._environments import (
      create_environment,
      delete_environment,
@@ -23,8 +64,6 @@ from sempy_labs._capacity_migration import (
  )
  from sempy_labs._capacities import (
      create_fabric_capacity,
-     # get_capacity_resource_governance,
-     # list_vcores,
      resume_fabric_capacity,
      suspend_fabric_capacity,
      update_fabric_capacity,
@@ -33,7 +72,6 @@ from sempy_labs._capacities import (
      delete_embedded_capacity,
      delete_premium_capacity,
  )
-
  from sempy_labs._spark import (
      get_spark_settings,
      update_spark_settings,
@@ -42,7 +80,6 @@ from sempy_labs._spark import (
      delete_custom_pool,
      update_custom_pool,
  )
-
  from sempy_labs._workspaces import (
      list_workspace_users,
      update_workspace_user,
@@ -107,27 +144,14 @@ from sempy_labs._list_functions import (
      list_shortcuts,
      get_object_level_security,
      list_capacities,
-     # list_annotations,
-     # list_columns,
      list_dashboards,
-     # list_datamarts,
-     # list_datapipelines,
-     # list_eventstreams,
-     # list_kpis,
-     # list_kqldatabases,
-     # list_kqlquerysets,
+     list_datamarts,
      list_lakehouses,
-     # list_mirroredwarehouses,
-     # list_mlexperiments,
-     # list_mlmodels,
-     # list_relationships,
-     # list_sqlendpoints,
-     # list_tables,
-     list_warehouses,
-     create_warehouse,
+     list_sql_endpoints,
      update_item,
  )
  from sempy_labs._helper_functions import (
+     convert_to_friendly_case,
      resolve_environment_id,
      resolve_capacity_id,
      resolve_warehouse_id,
@@ -150,10 +174,7 @@ from sempy_labs._helper_functions import (
      get_capacity_id,
      get_capacity_name,
      resolve_capacity_name,
-     # language_validate
  )
-
- # from sempy_labs._model_auto_build import model_auto_build
  from sempy_labs._model_bpa_bulk import (
      run_model_bpa_bulk,
      create_model_bpa_semantic_model,
@@ -184,7 +205,6 @@ from sempy_labs._refresh_semantic_model import (
  from sempy_labs._translations import translate_semantic_model
  from sempy_labs._vertipaq import (
      vertipaq_analyzer,
-     # visualize_vertipaq,
      import_vertipaq_analyzer,
  )

@@ -218,26 +238,13 @@ __all__ = [
      "deploy_semantic_model",
      "get_semantic_model_bim",
      "get_object_level_security",
-     #'list_annotations',
-     #'list_columns',
      "list_dashboards",
      "list_dataflow_storage_accounts",
-     #'list_datamarts',
-     #'list_datapipelines',
-     #'list_eventstreams',
-     #'list_kpis',
-     #'list_kqldatabases',
-     #'list_kqlquerysets',
      "list_lakehouses",
-     #'list_mirroredwarehouses',
-     #'list_mlexperiments',
-     #'list_mlmodels',
-     #'list_relationships',
-     #'list_sqlendpoints',
-     #'list_tables',
      "list_warehouses",
      "list_workspace_role_assignments",
      "create_warehouse",
+     "delete_warehouse",
      "update_item",
      "create_abfss_path",
      "format_dax_object_name",
@@ -251,8 +258,6 @@ __all__ = [
      "resolve_dataset_name",
      "resolve_report_id",
      "resolve_report_name",
-     # 'language_validate',
-     # "model_auto_build",
      "model_bpa_rules",
      "run_model_bpa",
      "measure_dependency_tree",
@@ -270,7 +275,6 @@ __all__ = [
      "cancel_dataset_refresh",
      "translate_semantic_model",
      "vertipaq_analyzer",
-     # 'visualize_vertipaq',
      "import_vertipaq_analyzer",
      "list_semantic_model_objects",
      "list_shortcuts",
@@ -318,7 +322,6 @@ __all__ = [
      "delete_fabric_capacity",
      "resume_fabric_capacity",
      "update_fabric_capacity",
-     "create_fabric_capacity",
      "delete_premium_capacity",
      "suspend_fabric_capacity",
      "delete_embedded_capacity",
@@ -327,17 +330,33 @@ __all__ = [
      "migrate_capacities",
      "create_fabric_capacity",
      "migrate_capacity_settings",
-     # "get_capacity_resource_governance",
-     # "list_vcores",
      "migrate_disaster_recovery_settings",
      "migrate_notification_settings",
      "migrate_access_settings",
      "migrate_delegated_tenant_settings",
-     "resume_fabric_capacity",
-     "suspend_fabric_capacity",
-     "update_fabric_capacity",
-     "delete_fabric_capacity",
-     "check_fabric_capacity_name_availablility",
-     "delete_embedded_capacity",
-     "delete_premium_capacity",
+     "convert_to_friendly_case",
+     "list_mirrored_warehouses",
+     "list_kql_databases",
+     "create_kql_database",
+     "delete_kql_database",
+     "create_eventhouse",
+     "list_eventhouses",
+     "delete_eventhouse",
+     "list_data_pipelines",
+     "create_data_pipeline",
+     "delete_data_pipeline",
+     "list_eventstreams",
+     "create_eventstream",
+     "delete_eventstream",
+     "list_kql_querysets",
+     "create_kql_queryset",
+     "delete_kql_queryset",
+     "list_ml_models",
+     "create_ml_model",
+     "delete_ml_model",
+     "list_ml_experiments",
+     "create_ml_experiment",
+     "delete_ml_experiment",
+     "list_sql_endpoints",
+     "list_datamarts",
  ]
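The `__all__` additions above export a consistent list/create/delete trio for each new item type. A hedged sketch of the warehouse trio (item names are placeholders; positional usage and the `workspace` keyword follow the library's usual pattern but are not verified against 0.8.2):

```python
import sempy_labs as labs

workspace = "My Workspace"  # placeholder

# Create a warehouse, list the warehouses in the workspace, then delete it again.
labs.create_warehouse("SalesWarehouse", workspace=workspace)
print(labs.list_warehouses(workspace=workspace))  # pandas DataFrame of warehouses
labs.delete_warehouse("SalesWarehouse", workspace=workspace)
```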
@@ -1,8 +1,6 @@
  import sempy
  import sempy.fabric as fabric
  import pandas as pd
- from synapse.ml.services.openai import OpenAICompletion
- from pyspark.sql.functions import col
  from pyspark.sql import SparkSession
  from typing import List, Optional, Union
  from IPython.display import display