semantic-link-labs 0.4.2.tar.gz → 0.6.0.tar.gz

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.


Files changed (90)
  1. semantic_link_labs-0.6.0/.github/ISSUE_TEMPLATE/bug_report.md +32 -0
  2. semantic_link_labs-0.6.0/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  3. semantic_link_labs-0.6.0/.github/ISSUE_TEMPLATE/issue--question---advice-needed.md +14 -0
  4. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/PKG-INFO +2 -2
  5. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/README.md +7 -6
  6. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/docs/source/conf.py +1 -1
  7. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/notebooks/Migration to Direct Lake.ipynb +1 -1
  8. semantic_link_labs-0.6.0/notebooks/Tabular Object Model.ipynb +1 -0
  9. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/pyproject.toml +2 -2
  10. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/semantic_link_labs.egg-info/PKG-INFO +2 -2
  11. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/semantic_link_labs.egg-info/SOURCES.txt +4 -0
  12. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/semantic_link_labs.egg-info/requires.txt +1 -1
  13. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/__init__.py +44 -14
  14. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/_ai.py +31 -32
  15. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/_clear_cache.py +5 -8
  16. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/_connections.py +80 -72
  17. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/_dax.py +7 -9
  18. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/_generate_semantic_model.py +60 -54
  19. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/_helper_functions.py +8 -10
  20. semantic_link_labs-0.6.0/src/sempy_labs/_icons.py +24 -0
  21. semantic_link_labs-0.6.0/src/sempy_labs/_list_functions.py +2339 -0
  22. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/_model_auto_build.py +5 -6
  23. semantic_link_labs-0.6.0/src/sempy_labs/_model_bpa.py +363 -0
  24. semantic_link_labs-0.6.0/src/sempy_labs/_model_bpa_rules.py +831 -0
  25. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/_model_dependencies.py +21 -25
  26. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/_one_lake_integration.py +10 -7
  27. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/_query_scale_out.py +83 -93
  28. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/_refresh_semantic_model.py +12 -16
  29. semantic_link_labs-0.6.0/src/sempy_labs/_translations.py +304 -0
  30. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/_vertipaq.py +51 -42
  31. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/__init__.py +2 -0
  32. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_directlake_schema_compare.py +12 -11
  33. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_directlake_schema_sync.py +13 -23
  34. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_fallback.py +5 -7
  35. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  36. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_get_shared_expression.py +4 -8
  37. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_guardrails.py +6 -8
  38. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_list_directlake_model_calc_tables.py +18 -12
  39. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py +4 -4
  40. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +9 -8
  41. semantic_link_labs-0.6.0/src/sempy_labs/directlake/_update_directlake_partition_entity.py +197 -0
  42. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_warm_cache.py +5 -5
  43. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
  44. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/lakehouse/_get_lakehouse_tables.py +4 -4
  45. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/lakehouse/_lakehouse.py +3 -4
  46. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/lakehouse/_shortcuts.py +17 -13
  47. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/__init__.py +1 -1
  48. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/_create_pqt_file.py +21 -24
  49. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/_migrate_calctables_to_lakehouse.py +16 -13
  50. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/_migrate_calctables_to_semantic_model.py +17 -18
  51. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +45 -46
  52. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -14
  53. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/_migration_validation.py +6 -2
  54. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/_refresh_calc_tables.py +10 -5
  55. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/report/__init__.py +2 -2
  56. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/report/_generate_report.py +8 -7
  57. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/report/_report_functions.py +47 -52
  58. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/report/_report_rebind.py +38 -37
  59. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/tom/__init__.py +1 -4
  60. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/tom/_model.py +541 -180
  61. semantic_link_labs-0.4.2/notebooks/Tabular Object Model.ipynb +0 -1
  62. semantic_link_labs-0.4.2/src/sempy_labs/_icons.py +0 -9
  63. semantic_link_labs-0.4.2/src/sempy_labs/_list_functions.py +0 -1628
  64. semantic_link_labs-0.4.2/src/sempy_labs/_model_bpa.py +0 -1354
  65. semantic_link_labs-0.4.2/src/sempy_labs/_translations.py +0 -378
  66. semantic_link_labs-0.4.2/src/sempy_labs/directlake/_update_directlake_partition_entity.py +0 -80
  67. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/.github/workflows/build.yaml +0 -0
  68. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/.github/workflows/codeql.yaml +0 -0
  69. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/.gitignore +0 -0
  70. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/.readthedocs.yaml +0 -0
  71. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/.vscode/settings.json +0 -0
  72. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/CODE_OF_CONDUCT.md +0 -0
  73. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/LICENSE +0 -0
  74. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/SECURITY.md +0 -0
  75. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/SUPPORT.md +0 -0
  76. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/docs/Makefile +0 -0
  77. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/docs/make.bat +0 -0
  78. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/docs/requirements.txt +0 -0
  79. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/docs/source/index.rst +0 -0
  80. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/docs/source/modules.rst +0 -0
  81. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/environment.yml +0 -0
  82. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/notebooks/Model Optimization.ipynb +0 -0
  83. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/notebooks/Query Scale Out.ipynb +0 -0
  84. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/setup.cfg +0 -0
  85. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/semantic_link_labs.egg-info/dependency_links.txt +0 -0
  86. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/semantic_link_labs.egg-info/top_level.txt +0 -0
  87. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/src/sempy_labs/lakehouse/__init__.py +0 -0
  88. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/tests/__init__.py +0 -0
  89. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/tests/test_shortcuts.py +0 -0
  90. {semantic_link_labs-0.4.2 → semantic_link_labs-0.6.0}/tests/test_tom.py +0 -0
@@ -0,0 +1,32 @@
+ ---
+ name: Bug report
+ about: Create a report to help us improve
+ title: ''
+ labels: bug
+ assignees: ''
+
+ ---
+
+ **Describe the bug**
+ A clear and concise description of what the bug is.
+
+ **To Reproduce**
+ Steps to reproduce the behavior:
+ 1. Go to '...'
+ 2. Click on '....'
+ 3. Scroll down to '....'
+ 4. See error
+
+ **Expected behavior**
+ A clear and concise description of what you expected to happen.
+
+ **Screenshots**
+ If applicable, add screenshots to help explain your problem.
+
+ **Desktop (please complete the following information):**
+ - OS: [e.g. iOS]
+ - Browser [e.g. chrome, safari]
+ - Version [e.g. 22]
+
+ **Additional context**
+ Add any other context about the problem here.
@@ -0,0 +1,20 @@
+ ---
+ name: Feature request
+ about: Suggest an idea for this project
+ title: ''
+ labels: enhancement
+ assignees: ''
+
+ ---
+
+ **Is your feature request related to a problem? Please describe.**
+ A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+ **Describe the solution you'd like**
+ A clear and concise description of what you want to happen.
+
+ **Describe alternatives you've considered**
+ A clear and concise description of any alternative solutions or features you've considered.
+
+ **Additional context**
+ Add any other context or screenshots about the feature request here.
@@ -0,0 +1,14 @@
+ ---
+ name: 'Issue: Question / Advice needed'
+ about: Ask a question about this project
+ title: ''
+ labels: question
+ assignees: ''
+
+ ---
+
+ **What are you trying to achieve?**
+ Provide a brief background description of the problem you're trying to solve. Include information about the semantic engine you're working on (Power BI Desktop, Azure Analysis Services, etc.), as not everything is possible across engines.
+
+ **What have you tried so far?**
+ If applicable, describe the steps you have tried so far. For scripting-related questions, show the code that you have written.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: semantic-link-labs
- Version: 0.4.2
+ Version: 0.6.0
  Summary: Semantic Link Labs project
  Author: Microsoft Corporation
  License: MIT License
@@ -14,7 +14,7 @@ Classifier: Programming Language :: Python :: 3 :: Only
  Classifier: Framework :: Jupyter
  Requires-Python: <3.12,>=3.10
  License-File: LICENSE
- Requires-Dist: semantic-link-sempy>=0.7.5
+ Requires-Dist: semantic-link-sempy>=0.7.6
  Requires-Dist: anytree
  Requires-Dist: powerbiclient
  Provides-Extra: test
@@ -1,21 +1,22 @@
- # semantic-link-labs
+ # Semantic Link Labs

  [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
- [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.4.2&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.6.0&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
  [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)

+ ---
+ [Read the documentation on ReadTheDocs!](https://semantic-link-labs.readthedocs.io/en/stable/)
+ ---

- This is a python library intended to be used in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library was originally intended to contain functions used for [migrating semantic models to Direct Lake mode](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration-1). However, it quickly became apparent that functions within such a library could support many other useful activities in the realm of semantic models, reports, lakehouses and really anything Fabric-related. As such, this library contains a variety of functions ranging from running [Vertipaq Analyzer](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#vertipaq_analyzer) or the [Best Practice Analyzer](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#run_model_bpa) against a semantic model to seeing if any [lakehouse tables hit Direct Lake guardrails](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_lakehouse_tables) or accessing the [Tabular Object Model](https://github.com/microsoft/semantic-link-labs/#tabular-object-model-tom) and more!
+ This is a python library intended to be used in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library was originally intended to solely contain functions used for [migrating semantic models to Direct Lake mode](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration). However, it quickly became apparent that functions within such a library could support many other useful activities in the realm of semantic models, reports, lakehouses and really anything Fabric-related. As such, this library contains a variety of functions ranging from running [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_vertipaq_analyzer) or the [Best Practice Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa) against a semantic model to seeing if any [lakehouse tables hit Direct Lake guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables) or accessing the [Tabular Object Model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html) and more!

- Instructions for migrating import/DirectQuery semantic models to Direct Lake mode can be found [here](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration-1).
+ Instructions for migrating import/DirectQuery semantic models to Direct Lake mode can be found [here](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration).

  If you encounter any issues, please [raise a bug](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=bug_report.md&title=).

  If you have ideas for new features/functions, please [request a feature](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=feature_request.md&title=).

- ## [Function documentation](https://semantic-link-labs.readthedocs.io/en/0.4.2/)
-
  ## Install the library in a Fabric notebook
  ```python
  %pip install semantic-link-labs
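For orientation, the workflow the updated README describes boils down to installing the package in a Fabric notebook and importing `sempy_labs`. Below is a minimal sketch assuming a Fabric notebook session; it reuses only calls that appear in this release's notebooks, and `run_model_bpa`'s signature is taken on trust from the README's documentation link:

```python
# In a Fabric notebook: install, then exercise the entry points the README links to.
%pip install semantic-link-labs

import sempy_labs as labs
from sempy_labs.tom import connect_semantic_model

dataset = ''      # placeholder: the semantic model name
workspace = None  # None resolves to the notebook's current workspace

# Best Practice Analyzer (linked from the README as run_model_bpa)
labs.run_model_bpa(dataset=dataset, workspace=workspace)

# Tabular Object Model access: a read-only pass over the model's tables
with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:
    for t in tom.model.Tables:
        print(t.Name)
```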
@@ -13,7 +13,7 @@ sys.path.insert(0, os.path.abspath('../../src/'))
  project = 'semantic-link-labs'
  copyright = '2024, Microsoft and community'
  author = 'Microsoft and community'
- release = '0.4.2'
+ release = '0.6.0'

  # -- General configuration ---------------------------------------------------
  # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
@@ -1 +1 @@
- {"cells":[{"cell_type":"markdown","id":"5c27dfd1-4fe0-4a97-92e6-ddf78889aa93","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Install the latest .whl package\n","\n","Check [here](https://pypi.org/project/semantic-link-labs/) to see the latest version."]},{"cell_type":"code","execution_count":null,"id":"d5cae9db-cef9-48a8-a351-9c5fcc99645c","metadata":{"jupyter":{"outputs_hidden":true,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["%pip install semantic-link-labs"]},{"cell_type":"markdown","id":"969a29bf","metadata":{},"source":["### Import the library and set initial parameters"]},{"cell_type":"code","execution_count":null,"id":"29c923f8","metadata":{},"outputs":[],"source":["import sempy_labs as labs\n","from sempy_labs import migration, report, directlake\n","\n","dataset_name = '' #Enter the import/DQ semantic model name\n","workspace_name = None #Enter the workspace of the import/DQ semantic model. It set to none it will use the current workspace.\n","new_dataset_name = '' #Enter the new Direct Lake semantic model name\n","new_dataset_workspace_name = None #Enter the workspace where the Direct Lake model will be created. If set to None it will use the current workspace.\n","lakehouse_name = None #Enter the lakehouse to be used for the Direct Lake model. If set to None it will use the lakehouse attached to the notebook.\n","lakehouse_workspace_name = None #Enter the lakehouse workspace. If set to None it will use the new_dataset_workspace_name."]},{"cell_type":"markdown","id":"5a3fe6e8-b8aa-4447-812b-7931831e07fe","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Create the [Power Query Template](https://learn.microsoft.com/power-query/power-query-template) file\n","\n","This encapsulates all of the semantic model's Power Query logic into a single file."]},{"cell_type":"code","execution_count":null,"id":"cde43b47-4ecc-46ae-9125-9674819c7eab","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["migration.create_pqt_file(dataset = dataset_name, workspace = workspace_name)"]},{"cell_type":"markdown","id":"bf945d07-544c-4934-b7a6-cfdb90ca725e","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Import the Power Query Template to Dataflows Gen2\n","\n","- Open the [OneLake file explorer](https://www.microsoft.com/download/details.aspx?id=105222) and sync your files (right click -> Sync from OneLake)\n","\n","- Navigate to your lakehouse. From this window, create a new Dataflows Gen2 and import the Power Query Template file from OneLake (OneLake -> Workspace -> Lakehouse -> Files...), and publish the Dataflows Gen2.\n","\n","<div class=\"alert alert-block alert-info\">\n","<b>Important!:</b> Make sure to create the Dataflows Gen2 from within the lakehouse window. That will ensure that all the tables automatically map to that lakehouse as the destination. 
Otherwise, you will have to manually map each table to its destination individually.\n","</div>"]},{"cell_type":"markdown","id":"9975db7d","metadata":{},"source":["### Create the Direct Lake model based on the import/DQ semantic model\n","\n","Calculated columns are not migrated to the Direct Lake model as they are not supported in Direct Lake mode."]},{"cell_type":"code","execution_count":null,"id":"0a3616b5-566e-414e-a225-fb850d6418dc","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["import time\n","labs.create_blank_semantic_model(dataset = new_dataset_name, workspace = new_dataset_workspace_name)\n","\n","time.sleep(2)\n","\n","migration.migrate_calc_tables_to_lakehouse(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name,\n"," workspace = workspace_name,\n"," new_dataset_workspace = new_dataset_workspace_name,\n"," lakehouse = lakehouse_name,\n"," lakehouse_workspace = lakehouse_workspace_name)\n","migration.migrate_tables_columns_to_semantic_model(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name,\n"," workspace = workspace_name,\n"," new_dataset_workspace = new_dataset_workspace_name,\n"," lakehouse = lakehouse_name,\n"," lakehouse_workspace = lakehouse_workspace_name)\n","migration.migrate_calc_tables_to_semantic_model(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name,\n"," workspace = workspace_name,\n"," new_dataset_workspace = new_dataset_workspace_name,\n"," lakehouse = lakehouse_name,\n"," lakehouse_workspace = lakehouse_workspace_name)\n","migration.migrate_model_objects_to_semantic_model(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name,\n"," workspace = workspace_name,\n"," new_dataset_workspace = new_dataset_workspace_name)\n","migration.migrate_field_parameters(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name,\n"," workspace = workspace_name,\n"," new_dataset_workspace = new_dataset_workspace_name)\n","time.sleep(2)\n","migration.refresh_semantic_model(dataset = new_dataset_name, workspace = new_dataset_workspace_name)\n","migration.refresh_calc_tables(dataset = new_dataset_name, workspace = new_dataset_workspace_name)\n","migration.refresh_semantic_model(dataset = new_dataset_name, workspace = new_dataset_workspace_name)"]},{"cell_type":"markdown","id":"bb98bb13","metadata":{},"source":["### Show migrated/unmigrated objects"]},{"cell_type":"code","execution_count":null,"id":"5db2f22c","metadata":{},"outputs":[],"source":["migration.migration_validation(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name, \n"," workspace = workspace_name, \n"," new_dataset_workspace = new_dataset_workspace_name)"]},{"cell_type":"markdown","id":"fa244e9d-87c2-4a66-a7e0-be539a0ac7de","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Rebind all reports using the old semantic model to the new Direct Lake semantic model"]},{"cell_type":"code","execution_count":null,"id":"d4e867cc","metadata":{},"outputs":[],"source":["report.report_rebind_all(\n"," dataset = dataset_name,\n"," dataset_workspace = workspace_name,\n"," new_dataset = new_dataset_name,\n"," new_dataset_workpace = new_dataset_workspace_name,\n"," report_workspace = workspace_name)"]},{"cell_type":"markdown","id":"3365d20d","metadata":{},"source":["### Rebind reports one-by-one 
(optional)"]},{"cell_type":"code","execution_count":null,"id":"056b7180-d7ac-492c-87e7-ac7d0e4bb929","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["report_name = '' # Enter report name which you want to rebind to the new Direct Lake model\n","\n","report.report_rebind(\n"," report = report_name,\n"," dataset = new_dataset_name,\n"," report_workspace=workspace_name,\n"," dataset_workspace = new_dataset_workspace_name)"]},{"cell_type":"markdown","id":"526f2327","metadata":{},"source":["### Show unsupported objects"]},{"cell_type":"code","execution_count":null,"id":"a47376d7","metadata":{},"outputs":[],"source":["dfT, dfC, dfR = directlake.show_unsupported_direct_lake_objects(dataset = dataset_name, workspace = workspace_name)\n","\n","print('Calculated Tables are not supported...')\n","display(dfT)\n","print(\"Learn more about Direct Lake limitations here: https://learn.microsoft.com/power-bi/enterprise/directlake-overview#known-issues-and-limitations\")\n","print('Calculated columns are not supported. Columns of binary data type are not supported.')\n","display(dfC)\n","print('Columns used for relationship cannot be of data type datetime and they also must be of the same data type.')\n","display(dfR)"]},{"cell_type":"markdown","id":"ed08ba4c","metadata":{},"source":["### Schema check between semantic model tables/columns and lakehouse tables/columns\n","\n","This will list any tables/columns which are in the new semantic model but do not exist in the lakehouse"]},{"cell_type":"code","execution_count":null,"id":"03889ba4","metadata":{},"outputs":[],"source":["directlake.direct_lake_schema_compare(dataset = new_dataset_name, workspace = new_dataset_workspace_name)"]},{"cell_type":"markdown","id":"2229963b","metadata":{},"source":["### Show calculated tables which have been migrated to the Direct Lake semantic model as regular tables"]},{"cell_type":"code","execution_count":null,"id":"dd537d90","metadata":{},"outputs":[],"source":["directlake.list_direct_lake_model_calc_tables(dataset = new_dataset_name, workspace = new_dataset_workspace_name)"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Python 3","language":"python","name":"python3"},"language_info":{"name":"python","version":"3.12.3"},"microsoft":{"language":"python"},"nteract":{"version":"nteract-front-end@1.0.0"},"spark_compute":{"compute_id":"/trident/default"},"synapse_widget":{"state":{},"version":"0.1"},"widgets":{}},"nbformat":4,"nbformat_minor":5}
+ {"cells":[{"cell_type":"markdown","id":"5c27dfd1-4fe0-4a97-92e6-ddf78889aa93","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Install the latest .whl package\n","\n","Check [here](https://pypi.org/project/semantic-link-labs/) to see the latest version."]},{"cell_type":"code","execution_count":null,"id":"d5cae9db-cef9-48a8-a351-9c5fcc99645c","metadata":{"jupyter":{"outputs_hidden":true,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["%pip install semantic-link-labs"]},{"cell_type":"markdown","id":"969a29bf","metadata":{},"source":["### Import the library and set initial parameters"]},{"cell_type":"code","execution_count":null,"id":"29c923f8","metadata":{},"outputs":[],"source":["import sempy_labs as labs\n","from sempy_labs import migration, report, directlake\n","\n","dataset_name = '' #Enter the import/DQ semantic model name\n","workspace_name = None #Enter the workspace of the import/DQ semantic model. It set to none it will use the current workspace.\n","new_dataset_name = '' #Enter the new Direct Lake semantic model name\n","new_dataset_workspace_name = None #Enter the workspace where the Direct Lake model will be created. If set to None it will use the current workspace.\n","lakehouse_name = None #Enter the lakehouse to be used for the Direct Lake model. If set to None it will use the lakehouse attached to the notebook.\n","lakehouse_workspace_name = None #Enter the lakehouse workspace. If set to None it will use the new_dataset_workspace_name."]},{"cell_type":"markdown","id":"5a3fe6e8-b8aa-4447-812b-7931831e07fe","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Create the [Power Query Template](https://learn.microsoft.com/power-query/power-query-template) file\n","\n","This encapsulates all of the semantic model's Power Query logic into a single file."]},{"cell_type":"code","execution_count":null,"id":"cde43b47-4ecc-46ae-9125-9674819c7eab","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["migration.create_pqt_file(dataset = dataset_name, workspace = workspace_name)"]},{"cell_type":"markdown","id":"bf945d07-544c-4934-b7a6-cfdb90ca725e","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Import the Power Query Template to Dataflows Gen2\n","\n","- Open the [OneLake file explorer](https://www.microsoft.com/download/details.aspx?id=105222) and sync your files (right click -> Sync from OneLake)\n","\n","- Navigate to your lakehouse. From this window, create a new Dataflows Gen2 and import the Power Query Template file from OneLake (OneLake -> Workspace -> Lakehouse -> Files...), and publish the Dataflows Gen2.\n","\n","<div class=\"alert alert-block alert-info\">\n","<b>Important!:</b> Make sure to create the Dataflows Gen2 from within the lakehouse window. That will ensure that all the tables automatically map to that lakehouse as the destination. 
Otherwise, you will have to manually map each table to its destination individually.\n","</div>"]},{"cell_type":"markdown","id":"9975db7d","metadata":{},"source":["### Create the Direct Lake model based on the import/DQ semantic model\n","\n","Calculated columns are not migrated to the Direct Lake model as they are not supported in Direct Lake mode."]},{"cell_type":"code","execution_count":null,"id":"0a3616b5-566e-414e-a225-fb850d6418dc","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["import time\n","labs.create_blank_semantic_model(dataset = new_dataset_name, workspace = new_dataset_workspace_name)\n","\n","time.sleep(2)\n","\n","migration.migrate_calc_tables_to_lakehouse(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name,\n"," workspace = workspace_name,\n"," new_dataset_workspace = new_dataset_workspace_name,\n"," lakehouse = lakehouse_name,\n"," lakehouse_workspace = lakehouse_workspace_name)\n","migration.migrate_tables_columns_to_semantic_model(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name,\n"," workspace = workspace_name,\n"," new_dataset_workspace = new_dataset_workspace_name,\n"," lakehouse = lakehouse_name,\n"," lakehouse_workspace = lakehouse_workspace_name)\n","migration.migrate_calc_tables_to_semantic_model(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name,\n"," workspace = workspace_name,\n"," new_dataset_workspace = new_dataset_workspace_name,\n"," lakehouse = lakehouse_name,\n"," lakehouse_workspace = lakehouse_workspace_name)\n","migration.migrate_model_objects_to_semantic_model(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name,\n"," workspace = workspace_name,\n"," new_dataset_workspace = new_dataset_workspace_name)\n","migration.migrate_field_parameters(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name,\n"," workspace = workspace_name,\n"," new_dataset_workspace = new_dataset_workspace_name)\n","time.sleep(2)\n","labs.refresh_semantic_model(dataset = new_dataset_name, workspace = new_dataset_workspace_name)\n","migration.refresh_calc_tables(dataset = new_dataset_name, workspace = new_dataset_workspace_name)\n","labs.refresh_semantic_model(dataset = new_dataset_name, workspace = new_dataset_workspace_name)"]},{"cell_type":"markdown","id":"bb98bb13","metadata":{},"source":["### Show migrated/unmigrated objects"]},{"cell_type":"code","execution_count":null,"id":"5db2f22c","metadata":{},"outputs":[],"source":["migration.migration_validation(\n"," dataset = dataset_name,\n"," new_dataset = new_dataset_name, \n"," workspace = workspace_name, \n"," new_dataset_workspace = new_dataset_workspace_name)"]},{"cell_type":"markdown","id":"fa244e9d-87c2-4a66-a7e0-be539a0ac7de","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Rebind all reports using the old semantic model to the new Direct Lake semantic model"]},{"cell_type":"code","execution_count":null,"id":"d4e867cc","metadata":{},"outputs":[],"source":["report.report_rebind_all(\n"," dataset = dataset_name,\n"," dataset_workspace = workspace_name,\n"," new_dataset = new_dataset_name,\n"," new_dataset_workpace = new_dataset_workspace_name,\n"," report_workspace = workspace_name)"]},{"cell_type":"markdown","id":"3365d20d","metadata":{},"source":["### Rebind reports one-by-one 
(optional)"]},{"cell_type":"code","execution_count":null,"id":"056b7180-d7ac-492c-87e7-ac7d0e4bb929","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["report_name = '' # Enter report name which you want to rebind to the new Direct Lake model\n","\n","report.report_rebind(\n"," report = report_name,\n"," dataset = new_dataset_name,\n"," report_workspace=workspace_name,\n"," dataset_workspace = new_dataset_workspace_name)"]},{"cell_type":"markdown","id":"526f2327","metadata":{},"source":["### Show unsupported objects"]},{"cell_type":"code","execution_count":null,"id":"a47376d7","metadata":{},"outputs":[],"source":["dfT, dfC, dfR = directlake.show_unsupported_direct_lake_objects(dataset = dataset_name, workspace = workspace_name)\n","\n","print('Calculated Tables are not supported...')\n","display(dfT)\n","print(\"Learn more about Direct Lake limitations here: https://learn.microsoft.com/power-bi/enterprise/directlake-overview#known-issues-and-limitations\")\n","print('Calculated columns are not supported. Columns of binary data type are not supported.')\n","display(dfC)\n","print('Columns used for relationship cannot be of data type datetime and they also must be of the same data type.')\n","display(dfR)"]},{"cell_type":"markdown","id":"ed08ba4c","metadata":{},"source":["### Schema check between semantic model tables/columns and lakehouse tables/columns\n","\n","This will list any tables/columns which are in the new semantic model but do not exist in the lakehouse"]},{"cell_type":"code","execution_count":null,"id":"03889ba4","metadata":{},"outputs":[],"source":["directlake.direct_lake_schema_compare(dataset = new_dataset_name, workspace = new_dataset_workspace_name)"]},{"cell_type":"markdown","id":"2229963b","metadata":{},"source":["### Show calculated tables which have been migrated to the Direct Lake semantic model as regular tables"]},{"cell_type":"code","execution_count":null,"id":"dd537d90","metadata":{},"outputs":[],"source":["directlake.list_direct_lake_model_calc_tables(dataset = new_dataset_name, workspace = new_dataset_workspace_name)"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Python 3","language":"python","name":"python3"},"language_info":{"name":"python","version":"3.12.3"},"microsoft":{"language":"python"},"nteract":{"version":"nteract-front-end@1.0.0"},"spark_compute":{"compute_id":"/trident/default"},"synapse_widget":{"state":{},"version":"0.1"},"widgets":{}},"nbformat":4,"nbformat_minor":5}
@@ -0,0 +1 @@
+ {"cells":[{"cell_type":"markdown","id":"5c27dfd1-4fe0-4a97-92e6-ddf78889aa93","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Install the latest .whl package\n","\n","Check [here](https://pypi.org/project/semantic-link-labs/) to see the latest version."]},{"cell_type":"code","execution_count":null,"id":"d5cae9db-cef9-48a8-a351-9c5fcc99645c","metadata":{"jupyter":{"outputs_hidden":true,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["%pip install semantic-link-labs"]},{"cell_type":"markdown","id":"5a3fe6e8-b8aa-4447-812b-7931831e07fe","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Connect to the [Tabular Object Model](https://learn.microsoft.com/analysis-services/tom/introduction-to-the-tabular-object-model-tom-in-analysis-services-amo?view=asallproducts-allversions) ([TOM](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.model?view=analysisservices-dotnet))\n","Setting the 'readonly' property to False enables read/write mode. This allows changes to be made to the semantic model."]},{"cell_type":"code","execution_count":null,"id":"cde43b47-4ecc-46ae-9125-9674819c7eab","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["import sempy_labs as labs\n","from sempy_labs.tom import connect_semantic_model\n","\n","dataset = '' # Enter dataset name\n","workspace = None # Enter workspace name\n","\n","with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," print(t.Name)"]},{"cell_type":"markdown","id":"fc6b277e","metadata":{},"source":["### Make changes to a semantic model using custom functions\n","Note that the custom functions have additional optional parameters (which may not be used in the examples below) for adding properties to model objects. 
Check the [documentation](https://semantic-link-labs.readthedocs.io/en/0.5.0/sempy_labs.tom.html) to see all available parameters for each function."]},{"cell_type":"markdown","id":"6d46d878","metadata":{},"source":["#### Rename objects in the semantic model"]},{"cell_type":"code","execution_count":null,"id":"1284825a","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," t.Name = t.Name.replace('_',' ')\n"]},{"cell_type":"code","execution_count":null,"id":"d3b60303","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," c.Name = c.Name.replace('_',' ')"]},{"cell_type":"markdown","id":"402a477c","metadata":{},"source":["#### Add measure(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"bdaaaa5c","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_measure(table_name ='Internet Sales', measure_name = 'Sales Amount', expression = \"SUM('Internet Sales'[SalesAmount])\")\n"," tom.add_measure(table_name ='Internet Sales', measure_name = 'Order Quantity', expression = \"SUM('Internet Sales'[OrderQty])\") "]},{"cell_type":"code","execution_count":null,"id":"a53a544b","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Internet Sales':\n"," tom.add_measure(table_name = t.Name, measure_name = 'Sales Amount', expression = \"SUM('Internet Sales'[SalesAmount])\")\n"," tom.add_measure(table_name = t.Name, measure_name = 'Order Quantity', expression = \"SUM('Internet Sales'[OrderQty])\")"]},{"cell_type":"markdown","id":"1cb1632f","metadata":{},"source":["#### Add column(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"81a22749","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_data_column(table_name ='Product', column_name = 'Size Range', source_column = 'SizeRange', data_type = 'Int64')\n"," tom.add_data_column(table_name = 'Segment', column_name = 'Summary Segment', source_column = 'SummarySegment', data_type = 'String')\n","\n"," tom.add_calculated_column(table_name = 'Internet Sales', column_name = 'GrossMargin', expression = \"'Internet Sales'[SalesAmount] - 'Internet Sales'[ProductCost]\", data_type = 'Decimal')"]},{"cell_type":"code","execution_count":null,"id":"053b6516","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.add_data_column(table_name = t.Name, column_name = 'Size Range', source_column = 'SizeRange', data_type = 'Int64')\n"," elif t.Name == 'Segment':\n"," tom.add_data_column(table_name = t.Name, column_name = 'Summary Segment', source_column = 'SummarySegment', data_type = 'String')\n"," elif t.Name == 'Internet Sales':\n"," tom.add_calculated_column(table_name = t.Name, column_name = 'GrossMargin', expression = \"'Internet Sales'[SalesAmount] - 'Internet Sales'[ProductCost]\", data_type = 'Decimal')"]},{"cell_type":"markdown","id":"f53dcca7","metadata":{},"source":["#### Add hierarchies to the semantic 
model"]},{"cell_type":"code","execution_count":null,"id":"a9309e23","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_hierarchy(table_name = 'Geography', hierarchy_name = 'Geo Hierarchy', levels = ['Continent', 'Country', 'State', 'City'])"]},{"cell_type":"code","execution_count":null,"id":"a04281ce","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Geography':\n"," tom.add_hierarchy(table_name = t.Name, hierarchy_name = 'Geo Hierarchy', levels = ['Continent', 'Country', 'State', 'City'])"]},{"cell_type":"markdown","id":"47c06a4f","metadata":{},"source":["#### Add relationship(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"e8cd7bbf","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_relationship(\n"," from_table = 'Internet Sales', from_column = 'ProductKey',\n"," to_table = 'Product', to_column = 'ProductKey', \n"," from_cardinality = 'Many', to_cardinality = 'One')"]},{"cell_type":"markdown","id":"3cc7f11e","metadata":{},"source":["#### Add a table with an M partition to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"0f5dd66a","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_table(name = table_name)\n"," tom.add_m_partition(table_name = table_name, partition_name = table_name, expression = 'let....')"]},{"cell_type":"markdown","id":"ea389123","metadata":{},"source":["#### Add a table with an entity partition to a Direct Lake semantic model "]},{"cell_type":"code","execution_count":null,"id":"f75387d1","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_table(name = table_name)\n"," tom.add_entity_partition(table_name = table_name, entity_name = table_name)"]},{"cell_type":"markdown","id":"e74d0f54","metadata":{},"source":["#### Add a calculated table (and columns) to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"934f7315","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_calculated_table(name = table_name, expression = \"DISTINCT('Product'[Color])\")\n"," tom.add_calculated_table_column(table_name = table_name, column_name = 'Color', source_column = \"'Product[Color]\", data_type = 'String')"]},{"cell_type":"markdown","id":"0e7088b7","metadata":{},"source":["#### Add role(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"ad60ebb9","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_role(role_name = 'Reader')"]},{"cell_type":"markdown","id":"c541f81a","metadata":{},"source":["#### Set row level security (RLS) to the semantic model\n","This adds row level security (or updates it if it already exists)"]},{"cell_type":"code","execution_count":null,"id":"98603a08","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_rls(role_name ='Reader', table_name = 'Product', 
filter_expression = \"'Dim Product'[Color] = \\\"Blue\\\"\")"]},{"cell_type":"code","execution_count":null,"id":"effea009","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for r in tom.model.Roles:\n"," if r.Name == 'Reader':\n"," tom.set_rls(role_name = r.Name, table_name = 'Product', filter_expression = \"'Dim Product'[Color] = \\\"Blue\\\"\")"]},{"cell_type":"markdown","id":"7fa7a03c","metadata":{},"source":["#### Set object level security (OLS) to the semantic model\n","This adds row level security (or updates it if it already exists)"]},{"cell_type":"code","execution_count":null,"id":"dd0def9d","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_ols(role_name = 'Reader', table_name = 'Product', column_name = 'Size', permission = 'None')"]},{"cell_type":"code","execution_count":null,"id":"7a389dc7","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for r in tom.model.Roles:\n"," if r.Name == 'Reader':\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.set_ols(role_name = r.Name, table_name = t.Name, column_name = 'Size', permission = 'None')"]},{"cell_type":"markdown","id":"d0f7ccd1","metadata":{},"source":["#### Add calculation groups and calculation items to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"97f4708b","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_calculation_group(name = 'MyCalcGroup')"]},{"cell_type":"code","execution_count":null,"id":"fef68832","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_calculation_item(table_name = 'MyCalcGroup', calculation_item_name = 'YTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESYTD('Calendar'[CalendarDate]))\")\n"," tom.add_calculation_item(table_name = 'MyCalcGroup', calculation_item_name = 'MTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESMTD('Calendar'[CalendarDate]))\")"]},{"cell_type":"code","execution_count":null,"id":"c7653dcc","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'MyCalcGroup':\n"," tom.add_calculation_item(table_name = t.Name, calculation_item_name = 'YTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESYTD('Calendar'[CalendarDate]))\")\n"," tom.add_calculation_item(table_name = t.Name, calculation_item_name = 'MTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESMTD('Calendar'[CalendarDate]))\")"]},{"cell_type":"markdown","id":"c6450c74","metadata":{},"source":["#### Add translations to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"2b616b90","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_translation(language = 'it-IT')"]},{"cell_type":"code","execution_count":null,"id":"dc24c200","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_translation(object = tom.model.Tables['Product'], language = 'it-IT', property = 'Name', value = 
'Produtto')"]},{"cell_type":"markdown","id":"3048cc95","metadata":{},"source":["#### Add a [Field Parameter](https://learn.microsoft.com/power-bi/create-reports/power-bi-field-parameters) to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"0a94af94","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_field_parameter(table_name = 'Parameter', objects = \"'Product'[Color], [Sales Amount], 'Geography'[Country]\")"]},{"cell_type":"markdown","id":"95aac09a","metadata":{},"source":["#### Remove an object(s) from a semantic model"]},{"cell_type":"code","execution_count":null,"id":"1e2572a8","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.remove_object(object = t.Columns['Size'])\n"," tom.remove_object(object = t.Hierarchies['Product Hierarchy'])"]},{"cell_type":"code","execution_count":null,"id":"bc453177","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.remove_object(object = tom.model.Tables['Product'].Columns['Size'])\n"," tom.remove_object(object = tom.model.Tables['Product'].Hierarchies['Product Hierarchy'])"]},{"cell_type":"markdown","id":"e0d0cb9e","metadata":{},"source":["### Custom functions to loop through non-top-level objects in a semantic model"]},{"cell_type":"code","execution_count":null,"id":"cbe3b1a3","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," print(c.Name)"]},{"cell_type":"code","execution_count":null,"id":"3f643e66","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for m in tom.all_measures():\n"," print(m.Name)"]},{"cell_type":"code","execution_count":null,"id":"ed1cde0f","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for p in tom.all_partitions():\n"," print(p.Name)"]},{"cell_type":"code","execution_count":null,"id":"f48014ae","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for h in tom.all_hierarchies():\n"," print(h.Name)"]},{"cell_type":"code","execution_count":null,"id":"9f5e7b72","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for ci in tom.all_calculation_items():\n"," print(ci.Name)"]},{"cell_type":"code","execution_count":null,"id":"3cd9ebc1","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for l in tom.all_levels():\n"," print(l.Name)"]},{"cell_type":"code","execution_count":null,"id":"12c58bad","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for rls in tom.all_rls():\n"," print(rls.Name)"]},{"cell_type":"markdown","id":"1a294bd2","metadata":{},"source":["### See Vertipaq Analyzer stats"]},{"cell_type":"code","execution_count":null,"id":"469660e9","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," 
tom.set_vertipaq_annotations()\n","\n"," for t in tom.model.Tables:\n"," rc = tom.row_count(object = t)\n"," print(t.Name + ' : ' + str(rc))\n"," for c in t.Columns:\n"," col_size = tom.total_size(column = c)\n"," print(labs.format_dax_object_name(t.Name, c.Name) + ' : ' + str(col_size))"]},{"cell_type":"markdown","id":"1ab26dfd","metadata":{},"source":["### 'UsedIn' functions"]},{"cell_type":"code","execution_count":null,"id":"412bf287","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," full_name = labs.format_dax_object_name(c.Parent.Name, c.Name)\n"," for h in tom.used_in_hierarchies(column = c):\n"," print(full_name + ' : ' + h.Name)"]},{"cell_type":"code","execution_count":null,"id":"76556900","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," full_name = labs.format_dax_object_name(c.Parent.Name, c.Name)\n"," for r in tom.used_in_relationships(object = c):\n"," rel_name = labs.create_relationship_name(r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name)\n"," print(full_name + ' : ' + rel_name)"]},{"cell_type":"code","execution_count":null,"id":"4d9ec24e","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," for r in tom.used_in_relationships(object = t):\n"," rel_name = labs.create_relationship_name(r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name)\n"," print(t.Name + ' : ' + rel_name)"]},{"cell_type":"code","execution_count":null,"id":"82251336","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," dep = labs.get_model_calc_dependencies(dataset = dataset, workspace=workspace)\n"," for o in tom.used_in_rls(object = tom.model.Tables['Product'].Columns['Color'], dependencies=dep):\n"," print(o.Name)"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Synapse PySpark","language":"Python","name":"synapse_pyspark"},"language_info":{"name":"python"},"microsoft":{"language":"python"},"nteract":{"version":"nteract-front-end@1.0.0"},"spark_compute":{"compute_id":"/trident/default"},"synapse_widget":{"state":{},"version":"0.1"},"widgets":{}},"nbformat":4,"nbformat_minor":5}
@@ -7,7 +7,7 @@ name="semantic-link-labs"
  authors = [
      { name = "Microsoft Corporation" },
  ]
- version="0.4.2"
+ version="0.6.0"
  description="Semantic Link Labs project"
  requires-python=">=3.10,<3.12"
  classifiers = [
@@ -22,7 +22,7 @@ classifiers = [
  license= { text = "MIT License" }

  dependencies = [
-     "semantic-link-sempy>=0.7.5",
+     "semantic-link-sempy>=0.7.6",
      "anytree",
      "powerbiclient"
  ]
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: semantic-link-labs
- Version: 0.4.2
+ Version: 0.6.0
  Summary: Semantic Link Labs project
  Author: Microsoft Corporation
  License: MIT License
@@ -14,7 +14,7 @@ Classifier: Programming Language :: Python :: 3 :: Only
  Classifier: Framework :: Jupyter
  Requires-Python: <3.12,>=3.10
  License-File: LICENSE
- Requires-Dist: semantic-link-sempy>=0.7.5
+ Requires-Dist: semantic-link-sempy>=0.7.6
  Requires-Dist: anytree
  Requires-Dist: powerbiclient
  Provides-Extra: test
@@ -7,6 +7,9 @@ SECURITY.md
  SUPPORT.md
  environment.yml
  pyproject.toml
+ .github/ISSUE_TEMPLATE/bug_report.md
+ .github/ISSUE_TEMPLATE/feature_request.md
+ .github/ISSUE_TEMPLATE/issue--question---advice-needed.md
  .github/workflows/build.yaml
  .github/workflows/codeql.yaml
  .vscode/settings.json
@@ -36,6 +39,7 @@ src/sempy_labs/_icons.py
  src/sempy_labs/_list_functions.py
  src/sempy_labs/_model_auto_build.py
  src/sempy_labs/_model_bpa.py
+ src/sempy_labs/_model_bpa_rules.py
  src/sempy_labs/_model_dependencies.py
  src/sempy_labs/_one_lake_integration.py
  src/sempy_labs/_query_scale_out.py
@@ -1,4 +1,4 @@
- semantic-link-sempy>=0.7.5
+ semantic-link-sempy>=0.7.6
  anytree
  powerbiclient

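The only runtime dependency change in this release is raising the `semantic-link-sempy` floor from 0.7.5 to 0.7.6. In a notebook, an explicit upgrade that satisfies the new constraint might look like this sketch (version pins taken from this diff; adjust as releases move on):

```python
# Fabric notebook cell: install 0.6.0 and satisfy its raised dependency floor.
%pip install "semantic-link-labs==0.6.0" "semantic-link-sempy>=0.7.6"
```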
@@ -9,13 +9,15 @@ from sempy_labs._dax import evaluate_dax_impersonation
 from sempy_labs._generate_semantic_model import (
     create_blank_semantic_model,
     create_semantic_model_from_bim,
-    # deploy_semantic_model,
+    deploy_semantic_model,
     get_semantic_model_bim,
 )
 from sempy_labs._list_functions import (
+    delete_custom_pool,
     list_semantic_model_objects,
     list_shortcuts,
     get_object_level_security,
+    list_capacities,
     # list_annotations,
     # list_columns,
     list_dashboards,
@@ -34,9 +36,21 @@ from sempy_labs._list_functions import (
     # list_sqlendpoints,
     # list_tables,
     list_warehouses,
-    # list_workspace_role_assignments,
+    list_workspace_role_assignments,
     create_warehouse,
     update_item,
+    list_custom_pools,
+    create_custom_pool,
+    update_custom_pool,
+    assign_workspace_to_capacity,
+    unassign_workspace_from_capacity,
+    get_spark_settings,
+    update_spark_settings,
+    add_user_to_workspace,
+    delete_user_from_workspace,
+    update_workspace_user,
+    list_workspace_users,
+    assign_workspace_to_dataflow_storage,
 )

 from sempy_labs._helper_functions import (
@@ -54,8 +68,10 @@ from sempy_labs._helper_functions import (
     resolve_report_name,
     # language_validate
 )
+
 # from sempy_labs._model_auto_build import model_auto_build
-from sempy_labs._model_bpa import model_bpa_rules, run_model_bpa
+from sempy_labs._model_bpa import run_model_bpa
+from sempy_labs._model_bpa_rules import model_bpa_rules
 from sempy_labs._model_dependencies import (
     measure_dependency_tree,
     get_measure_dependencies,
@@ -85,6 +101,7 @@ from sempy_labs._vertipaq import (
 )

 __all__ = [
+    "delete_custom_pool",
     "clear_cache",
     # create_connection_cloud,
     # create_connection_vnet,
@@ -92,7 +109,7 @@ __all__ = [
     "evaluate_dax_impersonation",
     "create_blank_semantic_model",
     "create_semantic_model_from_bim",
-    #'deploy_semantic_model',
+    "deploy_semantic_model",
     "get_semantic_model_bim",
     "get_object_level_security",
     #'list_annotations',
@@ -113,7 +130,7 @@ __all__ = [
     #'list_sqlendpoints',
     #'list_tables',
     "list_warehouses",
-    #'list_workspace_role_assignments',
+    "list_workspace_role_assignments",
     "create_warehouse",
     "update_item",
     "create_abfss_path",
@@ -129,20 +146,20 @@ __all__ = [
     "resolve_report_id",
     "resolve_report_name",
     #'language_validate',
-    #"model_auto_build",
+    # "model_auto_build",
     "model_bpa_rules",
     "run_model_bpa",
     "measure_dependency_tree",
     "get_measure_dependencies",
     "get_model_calc_dependencies",
     "export_model_to_onelake",
-    'qso_sync',
-    'qso_sync_status',
-    'set_qso',
-    'list_qso_settings',
-    'disable_qso',
-    'set_semantic_model_storage_format',
-    'set_workspace_default_storage_format',
+    "qso_sync",
+    "qso_sync_status",
+    "set_qso",
+    "list_qso_settings",
+    "disable_qso",
+    "set_semantic_model_storage_format",
+    "set_workspace_default_storage_format",
     "refresh_semantic_model",
     "cancel_dataset_refresh",
     "translate_semantic_model",
@@ -150,5 +167,18 @@ __all__ = [
     #'visualize_vertipaq',
     "import_vertipaq_analyzer",
     "list_semantic_model_objects",
-    "list_shortcuts"
+    "list_shortcuts",
+    "list_custom_pools",
+    "create_custom_pool",
+    "update_custom_pool",
+    "assign_workspace_to_capacity",
+    "unassign_workspace_from_capacity",
+    "get_spark_settings",
+    "update_spark_settings",
+    "add_user_to_workspace",
+    "delete_user_from_workspace",
+    "update_workspace_user",
+    "list_workspace_users",
+    "assign_workspace_to_dataflow_storage",
+    "list_capacities",
 ]
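The `__init__.py` changes expand the public surface of `sempy_labs` with capacity, custom pool, Spark settings, and workspace-user administration, and they enable the previously commented-out `deploy_semantic_model`. A hedged sketch of the newly exported API (keyword names are illustrative assumptions; consult each function's docstring in 0.6.0):

```python
import sempy_labs as labs

# Workspace/capacity administration helpers newly exported in 0.6.0.
pools = labs.list_custom_pools(workspace="MyWorkspace")
users = labs.list_workspace_users(workspace="MyWorkspace")
capacities = labs.list_capacities()

# deploy_semantic_model is now public (it was commented out in 0.4.2).
# labs.deploy_semantic_model(...)  # arguments omitted; see the 0.6.0 docstring
```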
@@ -40,7 +40,8 @@ def optimize_semantic_model(dataset: str, workspace: Optional[str] = None):

     if len(fallback_filt) > 0:
         print(
-            f"{icons.yellow_dot} The '{dataset}' semantic model is a Direct Lake semantic model which contains views. Since views always fall back to DirectQuery, it is recommended to only use lakehouse tables and not views."
+            f"{icons.yellow_dot} The '{dataset}' semantic model is a Direct Lake semantic model which contains views. "
+            "Since views always fall back to DirectQuery, it is recommended to only use lakehouse tables and not views."
         )

     # Potential model reduction estimate
@@ -78,10 +79,9 @@ def generate_measure_descriptions(

     validModels = ["gpt-35-turbo", "gpt-35-turbo-16k", "gpt-4"]
     if gpt_model not in validModels:
-        print(
+        raise ValueError(
             f"{icons.red_dot} The '{gpt_model}' model is not a valid model. Enter a gpt_model from this list: {validModels}."
         )
-        return

     dfM = fabric.list_measures(dataset=dataset, workspace=workspace)

@@ -116,8 +116,7 @@
     )

     # Update the model to use the new descriptions
-    tom_server = fabric.create_tom_server(readonly=False, workspace=workspace)
-    m = tom_server.Databases.GetByName(dataset).Model
+    # with connect_semantic_model(dataset=dataset, workspace=workspace, readonly=False) as tom:

     # for t in m.Tables:
     #     tName = t.Name
@@ -148,10 +147,10 @@ def generate_aggs(
     import System

     # columns = {
-    #'SalesAmount': 'Sum',
-    #'ProductKey': 'GroupBy',
-    #'OrderDateKey': 'GroupBy'
-    # }
+    # 'SalesAmount': 'Sum',
+    # 'ProductKey': 'GroupBy',
+    # 'OrderDateKey': 'GroupBy'
+    # }

     if workspace is None:
         workspace_id = fabric.get_workspace_id()
@@ -173,48 +172,44 @@
     numericTypes = ["Int64", "Double", "Decimal"]

     if any(value not in aggTypes for value in columns.values()):
-        print(
+        raise ValueError(
             f"{icons.red_dot} Invalid aggregation type(s) have been specified in the 'columns' parameter. Valid aggregation types: {aggTypes}."
         )
-        return

     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
     dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
     dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
     if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
-        print(
+        raise ValueError(
             f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode. This function is only relevant for Direct Lake semantic models."
         )
-        return

     dfC_filtT = dfC[dfC["Table Name"] == table_name]

     if len(dfC_filtT) == 0:
-        print(
+        raise ValueError(
             f"{icons.red_dot} The '{table_name}' table does not exist in the '{dataset}' semantic model within the '{workspace}' workspace."
         )
-        return

     dfC_filt = dfC[
         (dfC["Table Name"] == table_name) & (dfC["Column Name"].isin(columnValues))
     ]

     if len(columns) != len(dfC_filt):
-        print(
+        raise ValueError(
             f"{icons.red_dot} Columns listed in '{columnValues}' do not exist in the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace."
         )
-        return

     # Check if doing sum/count/min/max etc. on a non-number column
-    for col, agg in columns.items():
-        dfC_col = dfC_filt[dfC_filt["Column Name"] == col]
+    for cm, agg in columns.items():
+        dfC_col = dfC_filt[dfC_filt["Column Name"] == cm]
         dataType = dfC_col["Data Type"].iloc[0]
         if agg in aggTypesAggregate and dataType not in numericTypes:
-            print(
-                f"{icons.red_dot} The '{col}' column in the '{table_name}' table is of '{dataType}' data type. Only columns of '{numericTypes}' data types can be aggregated as '{aggTypesAggregate}' aggregation types."
+            raise ValueError(
+                f"{icons.red_dot} The '{cm}' column in the '{table_name}' table is of '{dataType}' data type. Only columns of '{numericTypes}' data types"
+                f" can be aggregated as '{aggTypesAggregate}' aggregation types."
             )
-            return

     # Create/update lakehouse delta agg table
     aggSuffix = "_agg"
@@ -230,10 +225,10 @@
     dfI_filt = dfI[(dfI["Id"] == sqlEndpointId)]

     if len(dfI_filt) == 0:
-        print(
-            f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter."
+        raise ValueError(
+            f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in"
+            f" the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter."
         )
-        return

     lakehouseName = dfI_filt["Display Name"].iloc[0]
     lakehouse_id = resolve_lakehouse_id(
@@ -243,8 +238,8 @@
     # Generate SQL query
     query = "SELECT"
     groupBy = "\nGROUP BY"
-    for col, agg in columns.items():
-        colFilt = dfC_filt[dfC_filt["Column Name"] == col]
+    for cm, agg in columns.items():
+        colFilt = dfC_filt[dfC_filt["Column Name"] == cm]
         sourceCol = colFilt["Source"].iloc[0]

         if agg == "GroupBy":
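This loop emits one aggregate or GROUP BY column per entry in the `columns` dictionary. Under the example mapping from the comment block earlier in `generate_aggs`, the assembled string would look roughly like the following (the aliasing and table reference are assumptions, shown only to illustrate the shape of the output):

```python
# Approximate query produced for
# columns = {'SalesAmount': 'Sum', 'ProductKey': 'GroupBy', 'OrderDateKey': 'GroupBy'}
query = (
    "SELECT\n"
    "SUM(SalesAmount) AS SalesAmount,\n"
    "ProductKey,\n"
    "OrderDateKey\n"
    "FROM FactInternetSales\n"
    "GROUP BY\n"
    "ProductKey,\n"
    "OrderDateKey"
)
```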
@@ -284,7 +279,7 @@
     # Create/update semantic model agg table
     tom_server = fabric.create_tom_server(readonly=False, workspace=workspace)
     m = tom_server.Databases.GetByName(dataset).Model
-    f"\n{icons.in_progress} Updating the '{dataset}' semantic model..."
+    print(f"\n{icons.in_progress} Updating the '{dataset}' semantic model...")
     dfC_agg = dfC[dfC["Table Name"] == aggTableName]

     if len(dfC_agg) == 0:
@@ -348,7 +343,9 @@
             col.DataType = System.Enum.Parse(TOM.DataType, dType)

             m.Tables[aggTableName].Columns.Add(col)
-            print(f"{icons.green_dot} The '{aggTableName}'[{cName}] column has been added.")
+            print(
+                f"{icons.green_dot} The '{aggTableName}'[{cName}] column has been added."
+            )

     # Create relationships
     relMap = {"m": "Many", "1": "One", "0": "None"}
@@ -387,10 +384,11 @@
                 print(
                     f"{icons.green_dot} '{aggTableName}'[{fromColumn}] -> '{toTable}'[{toColumn}] relationship has been added."
                 )
-            except:
+            except Exception as e:
                 print(
                     f"{icons.red_dot} '{aggTableName}'[{fromColumn}] -> '{toTable}'[{toColumn}] relationship has not been created."
                 )
+                print(f"Exception occurred: {e}")
         elif toTable == table_name:
             try:
                 rel.ToColumn = m.Tables[aggTableName].Columns[toColumn]
@@ -398,11 +396,12 @@
                 print(
                     f"{icons.green_dot} '{fromTable}'[{fromColumn}] -> '{aggTableName}'[{toColumn}] relationship has been added."
                 )
-            except:
+            except Exception as e:
                 print(
                     f"{icons.red_dot} '{fromTable}'[{fromColumn}] -> '{aggTableName}'[{toColumn}] relationship has not been created."
                 )
-    f"Relationship creation is complete."
+                print(f"Exception occurred: {e}")
+    print("Relationship creation is complete.")

     # Create IF measure
     print(f"\n{icons.in_progress} Creating measure to check if the agg table can be used...")