semantic-link-labs 0.5.0__tar.gz → 0.6.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (89)
  1. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/PKG-INFO +2 -2
  2. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/README.md +6 -6
  3. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/docs/source/conf.py +1 -1
  4. semantic_link_labs-0.6.0/notebooks/Tabular Object Model.ipynb +1 -0
  5. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/pyproject.toml +2 -2
  6. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/semantic_link_labs.egg-info/PKG-INFO +2 -2
  7. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/semantic_link_labs.egg-info/SOURCES.txt +1 -0
  8. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/semantic_link_labs.egg-info/requires.txt +1 -1
  9. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/__init__.py +19 -13
  10. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_ai.py +43 -24
  11. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_clear_cache.py +4 -5
  12. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_connections.py +77 -70
  13. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_dax.py +7 -9
  14. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_generate_semantic_model.py +55 -44
  15. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_helper_functions.py +13 -6
  16. semantic_link_labs-0.6.0/src/sempy_labs/_icons.py +24 -0
  17. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_list_functions.py +491 -304
  18. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_model_auto_build.py +4 -3
  19. semantic_link_labs-0.6.0/src/sempy_labs/_model_bpa.py +363 -0
  20. semantic_link_labs-0.6.0/src/sempy_labs/_model_bpa_rules.py +831 -0
  21. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_model_dependencies.py +14 -12
  22. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_one_lake_integration.py +11 -5
  23. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_query_scale_out.py +89 -81
  24. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_refresh_semantic_model.py +16 -10
  25. semantic_link_labs-0.6.0/src/sempy_labs/_translations.py +304 -0
  26. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_vertipaq.py +53 -37
  27. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/__init__.py +2 -0
  28. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_directlake_schema_compare.py +12 -5
  29. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_directlake_schema_sync.py +13 -19
  30. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_fallback.py +5 -3
  31. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  32. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_get_shared_expression.py +4 -2
  33. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_guardrails.py +3 -3
  34. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
  35. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
  36. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
  37. semantic_link_labs-0.6.0/src/sempy_labs/directlake/_update_directlake_partition_entity.py +197 -0
  38. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/directlake/_warm_cache.py +6 -3
  39. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
  40. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/lakehouse/_get_lakehouse_tables.py +5 -3
  41. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/lakehouse/_lakehouse.py +2 -1
  42. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/lakehouse/_shortcuts.py +19 -12
  43. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/__init__.py +1 -1
  44. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/_create_pqt_file.py +21 -15
  45. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/_migrate_calctables_to_lakehouse.py +16 -13
  46. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/_migrate_calctables_to_semantic_model.py +17 -18
  47. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +43 -40
  48. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -14
  49. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/_migration_validation.py +2 -2
  50. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/migration/_refresh_calc_tables.py +8 -5
  51. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/report/__init__.py +2 -2
  52. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/report/_generate_report.py +10 -5
  53. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/report/_report_functions.py +67 -29
  54. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/report/_report_rebind.py +9 -8
  55. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/tom/__init__.py +1 -4
  56. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/tom/_model.py +555 -152
  57. semantic_link_labs-0.5.0/notebooks/Tabular Object Model.ipynb +0 -1
  58. semantic_link_labs-0.5.0/src/sempy_labs/_icons.py +0 -10
  59. semantic_link_labs-0.5.0/src/sempy_labs/_model_bpa.py +0 -1350
  60. semantic_link_labs-0.5.0/src/sempy_labs/_translations.py +0 -378
  61. semantic_link_labs-0.5.0/src/sempy_labs/directlake/_update_directlake_partition_entity.py +0 -74
  62. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
  63. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
  64. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/.github/ISSUE_TEMPLATE/issue--question---advice-needed.md +0 -0
  65. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/.github/workflows/build.yaml +0 -0
  66. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/.github/workflows/codeql.yaml +0 -0
  67. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/.gitignore +0 -0
  68. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/.readthedocs.yaml +0 -0
  69. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/.vscode/settings.json +0 -0
  70. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/CODE_OF_CONDUCT.md +0 -0
  71. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/LICENSE +0 -0
  72. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/SECURITY.md +0 -0
  73. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/SUPPORT.md +0 -0
  74. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/docs/Makefile +0 -0
  75. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/docs/make.bat +0 -0
  76. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/docs/requirements.txt +0 -0
  77. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/docs/source/index.rst +0 -0
  78. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/docs/source/modules.rst +0 -0
  79. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/environment.yml +0 -0
  80. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/notebooks/Migration to Direct Lake.ipynb +0 -0
  81. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/notebooks/Model Optimization.ipynb +0 -0
  82. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/notebooks/Query Scale Out.ipynb +0 -0
  83. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/setup.cfg +0 -0
  84. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/semantic_link_labs.egg-info/dependency_links.txt +0 -0
  85. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/semantic_link_labs.egg-info/top_level.txt +0 -0
  86. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/lakehouse/__init__.py +0 -0
  87. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/tests/__init__.py +0 -0
  88. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/tests/test_shortcuts.py +0 -0
  89. {semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/tests/test_tom.py +0 -0
{semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: semantic-link-labs
-Version: 0.5.0
+Version: 0.6.0
 Summary: Semantic Link Labs project
 Author: Microsoft Corporation
 License: MIT License
@@ -14,7 +14,7 @@ Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Framework :: Jupyter
 Requires-Python: <3.12,>=3.10
 License-File: LICENSE
-Requires-Dist: semantic-link-sempy>=0.7.5
+Requires-Dist: semantic-link-sempy>=0.7.6
 Requires-Dist: anytree
 Requires-Dist: powerbiclient
 Provides-Extra: test
{semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/README.md
@@ -1,13 +1,15 @@
-# [semantic-link-labs](https://semantic-link-labs.readthedocs.io/en/0.5.0/)
+# Semantic Link Labs
 
 [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
-[![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.5.0&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+[![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.6.0&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
 [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
 [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)
 
-All functions in this library are documented [here](https://semantic-link-labs.readthedocs.io/en/0.5.0/)!
+---
+[Read the documentation on ReadTheDocs!](https://semantic-link-labs.readthedocs.io/en/stable/)
+---
 
-This is a python library intended to be used in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library was originally intended to contain functions used for [migrating semantic models to Direct Lake mode](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration). However, it quickly became apparent that functions within such a library could support many other useful activities in the realm of semantic models, reports, lakehouses and really anything Fabric-related. As such, this library contains a variety of functions ranging from running [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/0.5.0/sempy_labs.html#sempy_labs.import_vertipaq_analyzer) or the [Best Practice Analyzer](https://semantic-link-labs.readthedocs.io/en/0.5.0/sempy_labs.html#sempy_labs.run_model_bpa) against a semantic model to seeing if any [lakehouse tables hit Direct Lake guardrails](https://semantic-link-labs.readthedocs.io/en/0.5.0/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables) or accessing the [Tabular Object Model](https://semantic-link-labs.readthedocs.io/en/0.5.0/sempy_labs.tom.html) and more!
+This is a python library intended to be used in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library was originally intended to solely contain functions used for [migrating semantic models to Direct Lake mode](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration). However, it quickly became apparent that functions within such a library could support many other useful activities in the realm of semantic models, reports, lakehouses and really anything Fabric-related. As such, this library contains a variety of functions ranging from running [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_vertipaq_analyzer) or the [Best Practice Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa) against a semantic model to seeing if any [lakehouse tables hit Direct Lake guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables) or accessing the [Tabular Object Model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html) and more!
 
 Instructions for migrating import/DirectQuery semantic models to Direct Lake mode can be found [here](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration).
 
@@ -15,8 +17,6 @@ If you encounter any issues, please [raise a bug](https://github.com/microsoft/s
 
 If you have ideas for new features/functions, please [request a feature](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=feature_request.md&title=).
 
-## [Function documentation](https://semantic-link-labs.readthedocs.io/en/0.5.0/)
-
 ## Install the library in a Fabric notebook
 ```python
 %pip install semantic-link-labs
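
As orientation for the README changes above: the Best Practice Analyzer it links to is exposed as `run_model_bpa` at the package root. A minimal sketch of invoking it; the dataset name is a placeholder, and `workspace=None` falls back to the current workspace per the `workspace is None` handling visible in the `_ai.py` hunk further down.

```python
# Minimal sketch; "AdventureWorks" is a placeholder dataset name.
import sempy_labs as labs

bpa_results = labs.run_model_bpa(
    dataset="AdventureWorks",  # placeholder semantic model name
    workspace=None,            # None resolves to the attached workspace
    return_dataframe=True,     # return findings as a pandas DataFrame
)
```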
{semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/docs/source/conf.py
@@ -13,7 +13,7 @@ sys.path.insert(0, os.path.abspath('../../src/'))
 project = 'semantic-link-labs'
 copyright = '2024, Microsoft and community'
 author = 'Microsoft and community'
-release = '0.5.0'
+release = '0.6.0'
 
 # -- General configuration ---------------------------------------------------
 # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
semantic_link_labs-0.6.0/notebooks/Tabular Object Model.ipynb
@@ -0,0 +1 @@
+{"cells":[{"cell_type":"markdown","id":"5c27dfd1-4fe0-4a97-92e6-ddf78889aa93","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Install the latest .whl package\n","\n","Check [here](https://pypi.org/project/semantic-link-labs/) to see the latest version."]},{"cell_type":"code","execution_count":null,"id":"d5cae9db-cef9-48a8-a351-9c5fcc99645c","metadata":{"jupyter":{"outputs_hidden":true,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["%pip install semantic-link-labs"]},{"cell_type":"markdown","id":"5a3fe6e8-b8aa-4447-812b-7931831e07fe","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Connect to the [Tabular Object Model](https://learn.microsoft.com/analysis-services/tom/introduction-to-the-tabular-object-model-tom-in-analysis-services-amo?view=asallproducts-allversions) ([TOM](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.model?view=analysisservices-dotnet))\n","Setting the 'readonly' property to False enables read/write mode. This allows changes to be made to the semantic model."]},{"cell_type":"code","execution_count":null,"id":"cde43b47-4ecc-46ae-9125-9674819c7eab","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["import sempy_labs as labs\n","from sempy_labs.tom import connect_semantic_model\n","\n","dataset = '' # Enter dataset name\n","workspace = None # Enter workspace name\n","\n","with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," print(t.Name)"]},{"cell_type":"markdown","id":"fc6b277e","metadata":{},"source":["### Make changes to a semantic model using custom functions\n","Note that the custom functions have additional optional parameters (which may not be used in the examples below) for adding properties to model objects. Check the [documentation](https://semantic-link-labs.readthedocs.io/en/0.5.0/sempy_labs.tom.html) to see all available parameters for each function."]},{"cell_type":"markdown","id":"6d46d878","metadata":{},"source":["#### Rename objects in the semantic model"]},{"cell_type":"code","execution_count":null,"id":"1284825a","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," t.Name = t.Name.replace('_',' ')\n"]},{"cell_type":"code","execution_count":null,"id":"d3b60303","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," c.Name = c.Name.replace('_',' ')"]},{"cell_type":"markdown","id":"402a477c","metadata":{},"source":["#### Add measure(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"bdaaaa5c","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_measure(table_name ='Internet Sales', measure_name = 'Sales Amount', expression = \"SUM('Internet Sales'[SalesAmount])\")\n"," tom.add_measure(table_name ='Internet Sales', measure_name = 'Order Quantity', expression = \"SUM('Internet Sales'[OrderQty])\") "]},{"cell_type":"code","execution_count":null,"id":"a53a544b","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Internet Sales':\n"," tom.add_measure(table_name = t.Name, measure_name = 'Sales Amount', expression = \"SUM('Internet Sales'[SalesAmount])\")\n"," tom.add_measure(table_name = t.Name, measure_name = 'Order Quantity', expression = \"SUM('Internet Sales'[OrderQty])\")"]},{"cell_type":"markdown","id":"1cb1632f","metadata":{},"source":["#### Add column(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"81a22749","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_data_column(table_name ='Product', column_name = 'Size Range', source_column = 'SizeRange', data_type = 'Int64')\n"," tom.add_data_column(table_name = 'Segment', column_name = 'Summary Segment', source_column = 'SummarySegment', data_type = 'String')\n","\n"," tom.add_calculated_column(table_name = 'Internet Sales', column_name = 'GrossMargin', expression = \"'Internet Sales'[SalesAmount] - 'Internet Sales'[ProductCost]\", data_type = 'Decimal')"]},{"cell_type":"code","execution_count":null,"id":"053b6516","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.add_data_column(table_name = t.Name, column_name = 'Size Range', source_column = 'SizeRange', data_type = 'Int64')\n"," elif t.Name == 'Segment':\n"," tom.add_data_column(table_name = t.Name, column_name = 'Summary Segment', source_column = 'SummarySegment', data_type = 'String')\n"," elif t.Name == 'Internet Sales':\n"," tom.add_calculated_column(table_name = t.Name, column_name = 'GrossMargin', expression = \"'Internet Sales'[SalesAmount] - 'Internet Sales'[ProductCost]\", data_type = 'Decimal')"]},{"cell_type":"markdown","id":"f53dcca7","metadata":{},"source":["#### Add hierarchies to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"a9309e23","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_hierarchy(table_name = 'Geography', hierarchy_name = 'Geo Hierarchy', levels = ['Continent', 'Country', 'State', 'City'])"]},{"cell_type":"code","execution_count":null,"id":"a04281ce","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Geography':\n"," tom.add_hierarchy(table_name = t.Name, hierarchy_name = 'Geo Hierarchy', levels = ['Continent', 'Country', 'State', 'City'])"]},{"cell_type":"markdown","id":"47c06a4f","metadata":{},"source":["#### Add relationship(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"e8cd7bbf","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_relationship(\n"," from_table = 'Internet Sales', from_column = 'ProductKey',\n"," to_table = 'Product', to_column = 'ProductKey', \n"," from_cardinality = 'Many', to_cardinality = 'One')"]},{"cell_type":"markdown","id":"3cc7f11e","metadata":{},"source":["#### Add a table with an M partition to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"0f5dd66a","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_table(name = table_name)\n"," tom.add_m_partition(table_name = table_name, partition_name = table_name, expression = 'let....')"]},{"cell_type":"markdown","id":"ea389123","metadata":{},"source":["#### Add a table with an entity partition to a Direct Lake semantic model "]},{"cell_type":"code","execution_count":null,"id":"f75387d1","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_table(name = table_name)\n"," tom.add_entity_partition(table_name = table_name, entity_name = table_name)"]},{"cell_type":"markdown","id":"e74d0f54","metadata":{},"source":["#### Add a calculated table (and columns) to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"934f7315","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_calculated_table(name = table_name, expression = \"DISTINCT('Product'[Color])\")\n"," tom.add_calculated_table_column(table_name = table_name, column_name = 'Color', source_column = \"'Product[Color]\", data_type = 'String')"]},{"cell_type":"markdown","id":"0e7088b7","metadata":{},"source":["#### Add role(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"ad60ebb9","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_role(role_name = 'Reader')"]},{"cell_type":"markdown","id":"c541f81a","metadata":{},"source":["#### Set row level security (RLS) to the semantic model\n","This adds row level security (or updates it if it already exists)"]},{"cell_type":"code","execution_count":null,"id":"98603a08","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_rls(role_name ='Reader', table_name = 'Product', filter_expression = \"'Dim Product'[Color] = \\\"Blue\\\"\")"]},{"cell_type":"code","execution_count":null,"id":"effea009","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for r in tom.model.Roles:\n"," if r.Name == 'Reader':\n"," tom.set_rls(role_name = r.Name, table_name = 'Product', filter_expression = \"'Dim Product'[Color] = \\\"Blue\\\"\")"]},{"cell_type":"markdown","id":"7fa7a03c","metadata":{},"source":["#### Set object level security (OLS) to the semantic model\n","This adds row level security (or updates it if it already exists)"]},{"cell_type":"code","execution_count":null,"id":"dd0def9d","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_ols(role_name = 'Reader', table_name = 'Product', column_name = 'Size', permission = 'None')"]},{"cell_type":"code","execution_count":null,"id":"7a389dc7","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for r in tom.model.Roles:\n"," if r.Name == 'Reader':\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.set_ols(role_name = r.Name, table_name = t.Name, column_name = 'Size', permission = 'None')"]},{"cell_type":"markdown","id":"d0f7ccd1","metadata":{},"source":["#### Add calculation groups and calculation items to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"97f4708b","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_calculation_group(name = 'MyCalcGroup')"]},{"cell_type":"code","execution_count":null,"id":"fef68832","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_calculation_item(table_name = 'MyCalcGroup', calculation_item_name = 'YTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESYTD('Calendar'[CalendarDate]))\")\n"," tom.add_calculation_item(table_name = 'MyCalcGroup', calculation_item_name = 'MTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESMTD('Calendar'[CalendarDate]))\")"]},{"cell_type":"code","execution_count":null,"id":"c7653dcc","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'MyCalcGroup':\n"," tom.add_calculation_item(table_name = t.Name, calculation_item_name = 'YTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESYTD('Calendar'[CalendarDate]))\")\n"," tom.add_calculation_item(table_name = t.Name, calculation_item_name = 'MTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESMTD('Calendar'[CalendarDate]))\")"]},{"cell_type":"markdown","id":"c6450c74","metadata":{},"source":["#### Add translations to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"2b616b90","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_translation(language = 'it-IT')"]},{"cell_type":"code","execution_count":null,"id":"dc24c200","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_translation(object = tom.model.Tables['Product'], language = 'it-IT', property = 'Name', value = 'Produtto')"]},{"cell_type":"markdown","id":"3048cc95","metadata":{},"source":["#### Add a [Field Parameter](https://learn.microsoft.com/power-bi/create-reports/power-bi-field-parameters) to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"0a94af94","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_field_parameter(table_name = 'Parameter', objects = \"'Product'[Color], [Sales Amount], 'Geography'[Country]\")"]},{"cell_type":"markdown","id":"95aac09a","metadata":{},"source":["#### Remove an object(s) from a semantic model"]},{"cell_type":"code","execution_count":null,"id":"1e2572a8","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.remove_object(object = t.Columns['Size'])\n"," tom.remove_object(object = t.Hierarchies['Product Hierarchy'])"]},{"cell_type":"code","execution_count":null,"id":"bc453177","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.remove_object(object = tom.model.Tables['Product'].Columns['Size'])\n"," tom.remove_object(object = tom.model.Tables['Product'].Hierarchies['Product Hierarchy'])"]},{"cell_type":"markdown","id":"e0d0cb9e","metadata":{},"source":["### Custom functions to loop through non-top-level objects in a semantic model"]},{"cell_type":"code","execution_count":null,"id":"cbe3b1a3","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," print(c.Name)"]},{"cell_type":"code","execution_count":null,"id":"3f643e66","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for m in tom.all_measures():\n"," print(m.Name)"]},{"cell_type":"code","execution_count":null,"id":"ed1cde0f","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for p in tom.all_partitions():\n"," print(p.Name)"]},{"cell_type":"code","execution_count":null,"id":"f48014ae","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for h in tom.all_hierarchies():\n"," print(h.Name)"]},{"cell_type":"code","execution_count":null,"id":"9f5e7b72","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for ci in tom.all_calculation_items():\n"," print(ci.Name)"]},{"cell_type":"code","execution_count":null,"id":"3cd9ebc1","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for l in tom.all_levels():\n"," print(l.Name)"]},{"cell_type":"code","execution_count":null,"id":"12c58bad","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for rls in tom.all_rls():\n"," print(rls.Name)"]},{"cell_type":"markdown","id":"1a294bd2","metadata":{},"source":["### See Vertipaq Analyzer stats"]},{"cell_type":"code","execution_count":null,"id":"469660e9","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_vertipaq_annotations()\n","\n"," for t in tom.model.Tables:\n"," rc = tom.row_count(object = t)\n"," print(t.Name + ' : ' + str(rc))\n"," for c in t.Columns:\n"," col_size = tom.total_size(column = c)\n"," print(labs.format_dax_object_name(t.Name, c.Name) + ' : ' + str(col_size))"]},{"cell_type":"markdown","id":"1ab26dfd","metadata":{},"source":["### 'UsedIn' functions"]},{"cell_type":"code","execution_count":null,"id":"412bf287","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," full_name = labs.format_dax_object_name(c.Parent.Name, c.Name)\n"," for h in tom.used_in_hierarchies(column = c):\n"," print(full_name + ' : ' + h.Name)"]},{"cell_type":"code","execution_count":null,"id":"76556900","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," full_name = labs.format_dax_object_name(c.Parent.Name, c.Name)\n"," for r in tom.used_in_relationships(object = c):\n"," rel_name = labs.create_relationship_name(r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name)\n"," print(full_name + ' : ' + rel_name)"]},{"cell_type":"code","execution_count":null,"id":"4d9ec24e","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," for r in tom.used_in_relationships(object = t):\n"," rel_name = labs.create_relationship_name(r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name)\n"," print(t.Name + ' : ' + rel_name)"]},{"cell_type":"code","execution_count":null,"id":"82251336","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," dep = labs.get_model_calc_dependencies(dataset = dataset, workspace=workspace)\n"," for o in tom.used_in_rls(object = tom.model.Tables['Product'].Columns['Color'], dependencies=dep):\n"," print(o.Name)"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Synapse PySpark","language":"Python","name":"synapse_pyspark"},"language_info":{"name":"python"},"microsoft":{"language":"python"},"nteract":{"version":"nteract-front-end@1.0.0"},"spark_compute":{"compute_id":"/trident/default"},"synapse_widget":{"state":{},"version":"0.1"},"widgets":{}},"nbformat":4,"nbformat_minor":5}
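
Because the new notebook above ships as minified JSON, here is one of its cells reproduced as readable Python for reference: connecting to the Tabular Object Model in read/write mode and adding a measure. All names come from the notebook's own examples.

```python
from sempy_labs.tom import connect_semantic_model

dataset = ''      # Enter dataset name
workspace = None  # Enter workspace name

# readonly=False enables read/write mode, so changes are written back to the model
with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:
    tom.add_measure(
        table_name='Internet Sales',
        measure_name='Sales Amount',
        expression="SUM('Internet Sales'[SalesAmount])",
    )
```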
{semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/pyproject.toml
@@ -7,7 +7,7 @@ name="semantic-link-labs"
 authors = [
     { name = "Microsoft Corporation" },
 ]
-version="0.5.0"
+version="0.6.0"
 description="Semantic Link Labs project"
 requires-python=">=3.10,<3.12"
 classifiers = [
@@ -22,7 +22,7 @@ classifiers = [
 license= { text = "MIT License" }
 
 dependencies = [
-    "semantic-link-sempy>=0.7.5",
+    "semantic-link-sempy>=0.7.6",
     "anytree",
     "powerbiclient"
 ]
{semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/semantic_link_labs.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: semantic-link-labs
-Version: 0.5.0
+Version: 0.6.0
 Summary: Semantic Link Labs project
 Author: Microsoft Corporation
 License: MIT License
@@ -14,7 +14,7 @@ Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Framework :: Jupyter
 Requires-Python: <3.12,>=3.10
 License-File: LICENSE
-Requires-Dist: semantic-link-sempy>=0.7.5
+Requires-Dist: semantic-link-sempy>=0.7.6
 Requires-Dist: anytree
 Requires-Dist: powerbiclient
 Provides-Extra: test
{semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/semantic_link_labs.egg-info/SOURCES.txt
@@ -39,6 +39,7 @@ src/sempy_labs/_icons.py
 src/sempy_labs/_list_functions.py
 src/sempy_labs/_model_auto_build.py
 src/sempy_labs/_model_bpa.py
+src/sempy_labs/_model_bpa_rules.py
 src/sempy_labs/_model_dependencies.py
 src/sempy_labs/_one_lake_integration.py
 src/sempy_labs/_query_scale_out.py
{semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/semantic_link_labs.egg-info/requires.txt
@@ -1,4 +1,4 @@
-semantic-link-sempy>=0.7.5
+semantic-link-sempy>=0.7.6
 anytree
 powerbiclient
 
{semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/__init__.py
@@ -9,13 +9,15 @@ from sempy_labs._dax import evaluate_dax_impersonation
 from sempy_labs._generate_semantic_model import (
     create_blank_semantic_model,
     create_semantic_model_from_bim,
-    # deploy_semantic_model,
+    deploy_semantic_model,
     get_semantic_model_bim,
 )
 from sempy_labs._list_functions import (
+    delete_custom_pool,
     list_semantic_model_objects,
     list_shortcuts,
     get_object_level_security,
+    list_capacities,
     # list_annotations,
     # list_columns,
     list_dashboards,
@@ -66,8 +68,10 @@ from sempy_labs._helper_functions import (
     resolve_report_name,
     # language_validate
 )
+
 # from sempy_labs._model_auto_build import model_auto_build
-from sempy_labs._model_bpa import model_bpa_rules, run_model_bpa
+from sempy_labs._model_bpa import run_model_bpa
+from sempy_labs._model_bpa_rules import model_bpa_rules
 from sempy_labs._model_dependencies import (
     measure_dependency_tree,
     get_measure_dependencies,
@@ -97,6 +101,7 @@ from sempy_labs._vertipaq import (
 )
 
 __all__ = [
+    "delete_custom_pool",
     "clear_cache",
     # create_connection_cloud,
     # create_connection_vnet,
@@ -104,7 +109,7 @@ __all__ = [
     "evaluate_dax_impersonation",
     "create_blank_semantic_model",
     "create_semantic_model_from_bim",
-    #'deploy_semantic_model',
+    "deploy_semantic_model",
     "get_semantic_model_bim",
     "get_object_level_security",
     #'list_annotations',
@@ -125,7 +130,7 @@ __all__ = [
     #'list_sqlendpoints',
     #'list_tables',
     "list_warehouses",
-    'list_workspace_role_assignments',
+    "list_workspace_role_assignments",
     "create_warehouse",
     "update_item",
     "create_abfss_path",
@@ -141,20 +146,20 @@ __all__ = [
     "resolve_report_id",
     "resolve_report_name",
     #'language_validate',
-    #"model_auto_build",
+    # "model_auto_build",
     "model_bpa_rules",
     "run_model_bpa",
     "measure_dependency_tree",
     "get_measure_dependencies",
     "get_model_calc_dependencies",
     "export_model_to_onelake",
-    'qso_sync',
-    'qso_sync_status',
-    'set_qso',
-    'list_qso_settings',
-    'disable_qso',
-    'set_semantic_model_storage_format',
-    'set_workspace_default_storage_format',
+    "qso_sync",
+    "qso_sync_status",
+    "set_qso",
+    "list_qso_settings",
+    "disable_qso",
+    "set_semantic_model_storage_format",
+    "set_workspace_default_storage_format",
     "refresh_semantic_model",
     "cancel_dataset_refresh",
     "translate_semantic_model",
@@ -174,5 +179,6 @@ __all__ = [
     "delete_user_from_workspace",
     "update_workspace_user",
     "list_workspace_users",
-    "assign_workspace_to_dataflow_storage"
+    "assign_workspace_to_dataflow_storage",
+    "list_capacities",
 ]
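
For reference, a sketch of the names this hunk newly promotes to the public surface; these imports are expected to work against 0.6.0 based on the `__all__` changes above, but are not taken from any shipped example.

```python
# Sketch based on the __init__.py diff above.
from sempy_labs import (
    deploy_semantic_model,  # previously commented out, exported in 0.6.0
    delete_custom_pool,     # new export from _list_functions
    list_capacities,        # new export from _list_functions
    model_bpa_rules,        # now sourced from sempy_labs._model_bpa_rules
)
```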
{semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_ai.py
@@ -14,7 +14,6 @@ def optimize_semantic_model(dataset: str, workspace: Optional[str] = None):
     from ._model_bpa import run_model_bpa
     from .directlake._fallback import check_fallback_reason
     from ._helper_functions import format_dax_object_name
-    from sempy_labs.tom import connect_semantic_model
 
     modelBPA = run_model_bpa(
         dataset=dataset, workspace=workspace, return_dataframe=True
@@ -41,7 +40,8 @@ def optimize_semantic_model(dataset: str, workspace: Optional[str] = None):
 
     if len(fallback_filt) > 0:
         print(
-            f"{icons.yellow_dot} The '{dataset}' semantic model is a Direct Lake semantic model which contains views. Since views always fall back to DirectQuery, it is recommended to only use lakehouse tables and not views."
+            f"{icons.yellow_dot} The '{dataset}' semantic model is a Direct Lake semantic model which contains views. "
+            "Since views always fall back to DirectQuery, it is recommended to only use lakehouse tables and not views."
         )
 
     # Potential model reduction estimate
@@ -79,7 +79,9 @@ def generate_measure_descriptions(
 
     validModels = ["gpt-35-turbo", "gpt-35-turbo-16k", "gpt-4"]
     if gpt_model not in validModels:
-        raise ValueError(f"{icons.red_dot} The '{gpt_model}' model is not a valid model. Enter a gpt_model from this list: {validModels}.")
+        raise ValueError(
+            f"{icons.red_dot} The '{gpt_model}' model is not a valid model. Enter a gpt_model from this list: {validModels}."
+        )
 
     dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
 
@@ -114,8 +116,7 @@ def generate_measure_descriptions(
     )
 
     # Update the model to use the new descriptions
-    #with connect_semantic_model(dataset=dataset, workspace=workspace, readonly=False) as tom:
-
+    # with connect_semantic_model(dataset=dataset, workspace=workspace, readonly=False) as tom:
 
     # for t in m.Tables:
     #     tName = t.Name
@@ -146,10 +147,10 @@ def generate_aggs(
     import System
 
     # columns = {
-    #'SalesAmount': 'Sum',
-    #'ProductKey': 'GroupBy',
-    #'OrderDateKey': 'GroupBy'
-    # }
+    # 'SalesAmount': 'Sum',
+    # 'ProductKey': 'GroupBy',
+    # 'OrderDateKey': 'GroupBy'
+    # }
 
     if workspace is None:
         workspace_id = fabric.get_workspace_id()
@@ -171,33 +172,44 @@ def generate_aggs(
     numericTypes = ["Int64", "Double", "Decimal"]
 
     if any(value not in aggTypes for value in columns.values()):
-        raise ValueError(f"{icons.red_dot} Invalid aggregation type(s) have been specified in the 'columns' parameter. Valid aggregation types: {aggTypes}.")
+        raise ValueError(
+            f"{icons.red_dot} Invalid aggregation type(s) have been specified in the 'columns' parameter. Valid aggregation types: {aggTypes}."
+        )
 
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
     dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
     dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
     if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
-        raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode. This function is only relevant for Direct Lake semantic models.")
-
+        raise ValueError(
+            f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode. This function is only relevant for Direct Lake semantic models."
+        )
+
     dfC_filtT = dfC[dfC["Table Name"] == table_name]
 
     if len(dfC_filtT) == 0:
-        raise ValueError(f"{icons.red_dot} The '{table_name}' table does not exist in the '{dataset}' semantic model within the '{workspace}' workspace.")
+        raise ValueError(
+            f"{icons.red_dot} The '{table_name}' table does not exist in the '{dataset}' semantic model within the '{workspace}' workspace."
+        )
 
     dfC_filt = dfC[
         (dfC["Table Name"] == table_name) & (dfC["Column Name"].isin(columnValues))
     ]
 
     if len(columns) != len(dfC_filt):
-        raise ValueError(f"{icons.red_dot} Columns listed in '{columnValues}' do not exist in the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace.")
+        raise ValueError(
+            f"{icons.red_dot} Columns listed in '{columnValues}' do not exist in the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace."
+        )
 
     # Check if doing sum/count/min/max etc. on a non-number column
-    for col, agg in columns.items():
-        dfC_col = dfC_filt[dfC_filt["Column Name"] == col]
+    for cm, agg in columns.items():
+        dfC_col = dfC_filt[dfC_filt["Column Name"] == cm]
         dataType = dfC_col["Data Type"].iloc[0]
         if agg in aggTypesAggregate and dataType not in numericTypes:
-            raise ValueError(f"{icons.red_dot} The '{col}' column in the '{table_name}' table is of '{dataType}' data type. Only columns of '{numericTypes}' data types can be aggregated as '{aggTypesAggregate}' aggregation types.")
+            raise ValueError(
+                f"{icons.red_dot} The '{cm}' column in the '{table_name}' table is of '{dataType}' data type. Only columns of '{numericTypes}' data types"
+                f" can be aggregated as '{aggTypesAggregate}' aggregation types."
+            )
 
     # Create/update lakehouse delta agg table
     aggSuffix = "_agg"
@@ -213,7 +225,10 @@ def generate_aggs(
     dfI_filt = dfI[(dfI["Id"] == sqlEndpointId)]
 
     if len(dfI_filt) == 0:
-        raise ValueError(f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter.")
+        raise ValueError(
+            f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in"
+            f" the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter."
+        )
 
     lakehouseName = dfI_filt["Display Name"].iloc[0]
     lakehouse_id = resolve_lakehouse_id(
@@ -223,8 +238,8 @@ def generate_aggs(
     # Generate SQL query
     query = "SELECT"
     groupBy = "\nGROUP BY"
-    for col, agg in columns.items():
-        colFilt = dfC_filt[dfC_filt["Column Name"] == col]
+    for cm, agg in columns.items():
+        colFilt = dfC_filt[dfC_filt["Column Name"] == cm]
         sourceCol = colFilt["Source"].iloc[0]
 
         if agg == "GroupBy":
@@ -328,7 +343,9 @@ def generate_aggs(
             col.DataType = System.Enum.Parse(TOM.DataType, dType)
 
             m.Tables[aggTableName].Columns.Add(col)
-            print(f"{icons.green_dot} The '{aggTableName}'[{cName}] column has been added.")
+            print(
+                f"{icons.green_dot} The '{aggTableName}'[{cName}] column has been added."
+            )
 
     # Create relationships
     relMap = {"m": "Many", "1": "One", "0": "None"}
@@ -367,10 +384,11 @@ def generate_aggs(
                 print(
                     f"{icons.green_dot} '{aggTableName}'[{fromColumn}] -> '{toTable}'[{toColumn}] relationship has been added."
                 )
-            except:
+            except Exception as e:
                 print(
                     f"{icons.red_dot} '{aggTableName}'[{fromColumn}] -> '{toTable}'[{toColumn}] relationship has not been created."
                 )
+                print(f"Exception occured: {e}")
         elif toTable == table_name:
             try:
                 rel.ToColumn = m.Tables[aggTableName].Columns[toColumn]
@@ -378,11 +396,12 @@ def generate_aggs(
                 print(
                     f"{icons.green_dot} '{fromTable}'[{fromColumn}] -> '{aggTableName}'[{toColumn}] relationship has been added."
                 )
-            except:
+            except Exception as e:
                 print(
                     f"{icons.red_dot} '{fromTable}'[{fromColumn}] -> '{aggTableName}'[{toColumn}] relationship has not been created."
                 )
-    f"Relationship creation is complete."
+                print(f"Exception occured: {e}")
+    "Relationship creation is complete."
 
     # Create IF measure
     f"\n{icons.in_progress} Creating measure to check if the agg table can be used..."
{semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_clear_cache.py
@@ -1,7 +1,6 @@
-import sempy
 import sempy.fabric as fabric
 from ._helper_functions import resolve_dataset_id
-from typing import List, Optional, Union
+from typing import Optional
 import sempy_labs._icons as icons
 
 
@@ -25,10 +24,10 @@ def clear_cache(dataset: str, workspace: Optional[str] = None):
    datasetID = resolve_dataset_id(dataset=dataset, workspace=workspace)
 
     xmla = f"""
-            <ClearCache xmlns="http://schemas.microsoft.com/analysisservices/2003/engine">
+        <ClearCache xmlns="http://schemas.microsoft.com/analysisservices/2003/engine">
             <Object>
-                <DatabaseID>{datasetID}</DatabaseID>
-            </Object>
+                <DatabaseID>{datasetID}</DatabaseID>
+            </Object>
             </ClearCache>
         """
     fabric.execute_xmla(dataset=dataset, xmla_command=xmla, workspace=workspace)
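
`clear_cache` is exported at the package root (it appears in the `__all__` list earlier); a minimal usage sketch with a placeholder dataset name:

```python
import sempy_labs as labs

# Issues the XMLA ClearCache command shown above against the model's database;
# "AdventureWorks" is a placeholder dataset name.
labs.clear_cache(dataset="AdventureWorks", workspace=None)
```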
{semantic_link_labs-0.5.0 → semantic_link_labs-0.6.0}/src/sempy_labs/_connections.py
@@ -1,8 +1,6 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
-from typing import List, Optional, Union
-import sempy_labs._icons as icons
+from sempy.fabric.exceptions import FabricHTTPException
 
 
 def create_connection_cloud(
@@ -56,29 +54,32 @@ def create_connection_cloud(
         },
     }
 
-    response = client.post(f"/v1/connections", json=request_body)
-
-    if response.status_code == 200:
-        o = response.json()
-        new_data = {
-            "Connection Id": o.get("id"),
-            "Connection Name": o.get("name"),
-            "Connectivity Type": o.get("connectivityType"),
-            "Connection Type": o.get("connectionDetails",{}).get("type"),
-            "Connection Path": o.get("connectionDetails",{}).get("path"),
-            "Privacy Level": o.get("privacyLevel"),
-            "Credential Type": o.get("credentialDetails",{}).get("credentialType"),
-            "Single Sign On Type": o.get("credentialDetails",{}).get("singleSignOnType"),
-            "Connection Encryption": o.get("credentialDetails",{}).get("connectionEncryption"),
-            "Skip Test Connection": o.get("credentialDetails",{}).get("skipTestConnection"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    response = client.post("/v1/connections", json=request_body)
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    o = response.json()
+    new_data = {
+        "Connection Id": o.get("id"),
+        "Connection Name": o.get("name"),
+        "Connectivity Type": o.get("connectivityType"),
+        "Connection Type": o.get("connectionDetails", {}).get("type"),
+        "Connection Path": o.get("connectionDetails", {}).get("path"),
+        "Privacy Level": o.get("privacyLevel"),
+        "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+        "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"),
+        "Connection Encryption": o.get("credentialDetails", {}).get(
+            "connectionEncryption"
+        ),
+        "Skip Test Connection": o.get("credentialDetails", {}).get(
+            "skipTestConnection"
+        ),
+    }
+    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
-        df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+    df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
 
-        return df
-    else:
-        print(f"{icons.red_dot} {response.status_code}")
+    return df
 
 
 def create_connection_on_prem(
@@ -131,30 +132,33 @@ def create_connection_on_prem(
         },
     }
 
-    response = client.post(f"/v1/connections", json=request_body)
-
-    if response.status_code == 200:
-        o = response.json()
-        new_data = {
-            "Connection Id": o.get("id"),
-            "Connection Name": o.get("name"),
-            "Gateway ID": o.get("gatewayId"),
-            "Connectivity Type": o.get("connectivityType"),
-            "Connection Type": o.get("connectionDetails",{}).get("type"),
-            "Connection Path": o.get("connectionDetails",{}).get("path"),
-            "Privacy Level": o.get("privacyLevel"),
-            "Credential Type": o.get("credentialDetails",{}).get("credentialType"),
-            "Single Sign On Type": o.get("credentialDetails",{}).get("singleSignOnType"),
-            "Connection Encryption": o.get("credentialDetails",{}).get("connectionEncryption"),
-            "Skip Test Connection": o.get("credentialDetails",{}).get("skipTestConnection"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    response = client.post("/v1/connections", json=request_body)
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    o = response.json()
+    new_data = {
+        "Connection Id": o.get("id"),
+        "Connection Name": o.get("name"),
+        "Gateway ID": o.get("gatewayId"),
+        "Connectivity Type": o.get("connectivityType"),
+        "Connection Type": o.get("connectionDetails", {}).get("type"),
+        "Connection Path": o.get("connectionDetails", {}).get("path"),
+        "Privacy Level": o.get("privacyLevel"),
+        "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+        "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"),
+        "Connection Encryption": o.get("credentialDetails", {}).get(
+            "connectionEncryption"
+        ),
+        "Skip Test Connection": o.get("credentialDetails", {}).get(
+            "skipTestConnection"
+        ),
+    }
+    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
-        df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+    df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
 
-        return df
-    else:
-        print(f"{icons.red_dot} {response.status_code}")
+    return df
 
 
 def create_connection_vnet(
@@ -209,27 +213,30 @@ def create_connection_vnet(
         },
     }
 
-    response = client.post(f"/v1/connections", json=request_body)
-
-    if response.status_code == 200:
-        o = response.json()
-        new_data = {
-            "Connection Id": o.get("id"),
-            "Connection Name": o.get("name"),
-            "Gateway ID": o.get("gatewayId"),
-            "Connectivity Type": o.get("connectivityType"),
-            "Connection Type": o.get("connectionDetails",{}).get("type"),
-            "Connection Path": o.get("connectionDetails",{}).get("path"),
-            "Privacy Level": o.get("privacyLevel"),
-            "Credential Type": o.get("credentialDetails",{}).get("credentialType"),
-            "Single Sign On Type": o.get("credentialDetails",{}).get("singleSignOnType"),
-            "Connection Encryption": o.get("credentialDetails",{}).get("connectionEncryption"),
-            "Skip Test Connection": o.get("credentialDetails",{}).get("skipTestConnection"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-        df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
-
-        return df
-    else:
-        print(f"{icons.red_dot} {response.status_code}")
+    response = client.post("/v1/connections", json=request_body)
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    o = response.json()
+    new_data = {
+        "Connection Id": o.get("id"),
+        "Connection Name": o.get("name"),
+        "Gateway ID": o.get("gatewayId"),
+        "Connectivity Type": o.get("connectivityType"),
+        "Connection Type": o.get("connectionDetails", {}).get("type"),
+        "Connection Path": o.get("connectionDetails", {}).get("path"),
+        "Privacy Level": o.get("privacyLevel"),
+        "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+        "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"),
+        "Connection Encryption": o.get("credentialDetails", {}).get(
+            "connectionEncryption"
+        ),
+        "Skip Test Connection": o.get("credentialDetails", {}).get(
+            "skipTestConnection"
+        ),
+    }
+    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+
+    return df
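
The refactor above replaces the old "print the status code and return nothing" branch with a raised `FabricHTTPException`. A minimal sketch of the shared pattern the three connection functions now follow; the helper name here is hypothetical, written only to illustrate the control flow.

```python
from sempy.fabric.exceptions import FabricHTTPException


def _parse_or_raise(response):
    # Hypothetical helper mirroring the 0.6.0 pattern: any non-200 response
    # raises FabricHTTPException instead of printing the status code, so
    # callers can catch the failure rather than receiving a silent None.
    if response.status_code != 200:
        raise FabricHTTPException(response)
    return response.json()
```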