semantic-link-labs 0.8.4-py3-none-any.whl → 0.8.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (49)
  1. {semantic_link_labs-0.8.4.dist-info → semantic_link_labs-0.8.5.dist-info}/METADATA +8 -3
  2. {semantic_link_labs-0.8.4.dist-info → semantic_link_labs-0.8.5.dist-info}/RECORD +49 -47
  3. {semantic_link_labs-0.8.4.dist-info → semantic_link_labs-0.8.5.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +29 -1
  5. sempy_labs/_data_pipelines.py +3 -3
  6. sempy_labs/_dataflows.py +116 -3
  7. sempy_labs/_dax.py +189 -3
  8. sempy_labs/_deployment_pipelines.py +3 -3
  9. sempy_labs/_environments.py +3 -3
  10. sempy_labs/_eventhouses.py +3 -3
  11. sempy_labs/_eventstreams.py +3 -3
  12. sempy_labs/_external_data_shares.py +1 -1
  13. sempy_labs/_generate_semantic_model.py +3 -3
  14. sempy_labs/_git.py +7 -7
  15. sempy_labs/_helper_functions.py +25 -1
  16. sempy_labs/_kql_databases.py +3 -3
  17. sempy_labs/_kql_querysets.py +3 -3
  18. sempy_labs/_mirrored_databases.py +428 -0
  19. sempy_labs/_mirrored_warehouses.py +1 -1
  20. sempy_labs/_ml_experiments.py +3 -3
  21. sempy_labs/_ml_models.py +4 -4
  22. sempy_labs/_model_bpa.py +209 -180
  23. sempy_labs/_model_bpa_bulk.py +41 -23
  24. sempy_labs/_model_dependencies.py +41 -87
  25. sempy_labs/_notebooks.py +2 -2
  26. sempy_labs/_query_scale_out.py +4 -4
  27. sempy_labs/_refresh_semantic_model.py +2 -2
  28. sempy_labs/_spark.py +6 -6
  29. sempy_labs/_vertipaq.py +31 -19
  30. sempy_labs/_warehouses.py +3 -3
  31. sempy_labs/_workspace_identity.py +2 -2
  32. sempy_labs/_workspaces.py +7 -7
  33. sempy_labs/admin/__init__.py +2 -0
  34. sempy_labs/admin/_basic_functions.py +54 -8
  35. sempy_labs/admin/_domains.py +1 -1
  36. sempy_labs/directlake/_update_directlake_partition_entity.py +1 -1
  37. sempy_labs/directlake/_warm_cache.py +10 -9
  38. sempy_labs/lakehouse/_get_lakehouse_tables.py +1 -1
  39. sempy_labs/lakehouse/_shortcuts.py +2 -2
  40. sempy_labs/migration/_create_pqt_file.py +5 -2
  41. sempy_labs/report/__init__.py +2 -0
  42. sempy_labs/report/_download_report.py +75 -0
  43. sempy_labs/report/_generate_report.py +3 -3
  44. sempy_labs/report/_report_functions.py +3 -3
  45. sempy_labs/report/_report_rebind.py +1 -1
  46. sempy_labs/report/_reportwrapper.py +4 -2
  47. sempy_labs/tom/_model.py +71 -35
  48. {semantic_link_labs-0.8.4.dist-info → semantic_link_labs-0.8.5.dist-info}/LICENSE +0 -0
  49. {semantic_link_labs-0.8.4.dist-info → semantic_link_labs-0.8.5.dist-info}/top_level.txt +0 -0

{semantic_link_labs-0.8.4.dist-info → semantic_link_labs-0.8.5.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: semantic-link-labs
- Version: 0.8.4
+ Version: 0.8.5
  Summary: Semantic Link Labs for Microsoft Fabric
  Author: Microsoft Corporation
  License: MIT License
@@ -15,7 +15,7 @@ Classifier: Framework :: Jupyter
  Requires-Python: <3.12,>=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: semantic-link-sempy >=0.8.1
+ Requires-Dist: semantic-link-sempy >=0.8.3
  Requires-Dist: anytree
  Requires-Dist: powerbiclient
  Requires-Dist: polib
@@ -27,7 +27,7 @@ Requires-Dist: pytest >=8.2.1 ; extra == 'test'
  # Semantic Link Labs

  [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
- [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.8.4&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.8.5&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
  [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)

@@ -41,6 +41,10 @@ If you encounter any issues, please [raise a bug](https://github.com/microsoft/s

  If you have ideas for new features/functions, please [request a feature](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=feature_request.md&title=).

+ Check out the video below for an introduction to Semantic Link, Semantic Link Labs and demos of key features!
+
+ [![Semantic Link Labs Video](https://img.youtube.com/vi/LSoWDEZk9b0/0.jpg)](https://www.youtube.com/watch?v=LSoWDEZk9b0)
+
  ## Featured Scenarios
  * Semantic Models
  * [Migrating an import/DirectQuery semantic model to Direct Lake](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration)
@@ -130,6 +134,7 @@ An even better way to ensure the semantic-link-labs library is available in your
  2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook

  ## Version History
+ * [0.8.5](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.5) (November 13, 2024)
  * [0.8.4](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.4) (October 30, 2024)
  * [0.8.3](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.3) (October 14, 2024)
  * [0.8.2](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.2) (October 2, 2024)
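
This release also raises the semantic-link-sempy floor from >=0.8.1 to >=0.8.3, so pinned environments may need a dependency refresh. A minimal sketch of picking up the new build in a Fabric notebook (the explicit version pin is illustrative, not required):

    # Fabric notebook cell: install the new release, then import as usual.
    %pip install semantic-link-labs==0.8.5
    import sempy_labs as labs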

{semantic_link_labs-0.8.4.dist-info → semantic_link_labs-0.8.5.dist-info}/RECORD CHANGED
@@ -1,46 +1,47 @@
- sempy_labs/__init__.py,sha256=ACWw4wFdmSU5h1jhsfCmVOgLXM3fBkRuP2XQB9i3J-E,11120
+ sempy_labs/__init__.py,sha256=m-MEq0ztOEFilCXIMz6NhseGu9WqqxFZPUz6wl26L_c,11920
  sempy_labs/_ai.py,sha256=CzsNw6Wpd2B5Rd0RcY250-_p0L-0gFoMNLEc_KmrobU,16177
  sempy_labs/_capacities.py,sha256=HWX1ivlWpyS7Ea_ny-39kUAQYFGMzo42kWMGdJMINos,25466
  sempy_labs/_capacity_migration.py,sha256=PCIodWXas9v7q93hqD2m8EanJHPJzke52jbCWOfnLZk,27764
  sempy_labs/_clear_cache.py,sha256=ttHsXXR6SRRw4eC0cS8I5h38UbWU9YJii1y-uR9R3KM,12493
  sempy_labs/_connections.py,sha256=w1NeC9auc07ko2pY3m5vM_9-sgW_chw1OxPzDviVC5k,12179
- sempy_labs/_data_pipelines.py,sha256=VpsK4tIjzxwpt2dvtyGgWF5ULBrKpI5qQ_z__w-IseE,5641
- sempy_labs/_dataflows.py,sha256=lFoLS3CRzoHgQd0PV6UCceEUAkhOVuqyou3i5v7X1ew,4496
- sempy_labs/_dax.py,sha256=dt1GgHceyM7f6phRBPxRKnmQy_KYKpcgFQHuOjGbpLo,2029
- sempy_labs/_deployment_pipelines.py,sha256=-jRCt-QiG6rDg0SxwFXjoE0XXrB_DbqvM92fax2UWBs,6024
+ sempy_labs/_data_pipelines.py,sha256=WdZjTELNuN_7suWj6NrZUxGnMTzAgIxFw8V6YMb8ags,5644
+ sempy_labs/_dataflows.py,sha256=h_iYfNQTkNR7iGPQMO47s5eVR3CrpL04pN50xQl63ac,8253
+ sempy_labs/_dax.py,sha256=oYNwMaHusxoqQmfi_S6iF2X5o29dCM7cb2eIiLpFlas,8605
+ sempy_labs/_deployment_pipelines.py,sha256=WBBQM85-3-MkXb5OmRPF6U83xLyhKSlYUyhRlkvcl4k,6027
  sempy_labs/_documentation.py,sha256=yVA8VPEzx_fmljtcvSxtB7-BeupYsfdMXXjp6Fpnyo8,5007
- sempy_labs/_environments.py,sha256=jM8jDt0dG3D5_wgmmfVb4UPyo59gbULb2WrJOYrSUIo,5323
- sempy_labs/_eventhouses.py,sha256=30BrCSWBDn0xyBeHRCdVCgcCGCG9t3Y2TtD_LVQ3Vj4,4033
- sempy_labs/_eventstreams.py,sha256=tYGwwLMUr3PU-nz1ApQoYqwa3MsXITvMimrpeDweHtM,4058
- sempy_labs/_external_data_shares.py,sha256=ZscP8PUEZlc6Q4SlSDtfVOVvo78Ct-t9R5xRU6PTsIQ,6798
- sempy_labs/_generate_semantic_model.py,sha256=7HwDPbnMKjd7yJ7EeoNS95_pmDQlK4JyLEP6QbNZ0gY,14473
- sempy_labs/_git.py,sha256=IADzLF9Y-PRsM_4wv8Z1B9xa8ytZr9RFoJVG10VduG0,13158
- sempy_labs/_helper_functions.py,sha256=P-QwdHD3EkGLpXN6fogQDasyIqjB3cy3hQLxOR_YVsY,37390
+ sempy_labs/_environments.py,sha256=avpLSfZyyQFdEDIIxWv2THLjPZwbs9XGXT7ob9l_-ao,5326
+ sempy_labs/_eventhouses.py,sha256=vgIFQkXcBPC4SnlrBzT7SRmembExxkm6n0gdKnc7Hlk,4036
+ sempy_labs/_eventstreams.py,sha256=Rht0eWoZbYF6XKtE3AOUvGgA21smxC9gdN599z-jY3s,4061
+ sempy_labs/_external_data_shares.py,sha256=lUsKy1mexNSmhyFwxSeE2jZKNdDAWDP6iC6UPTXCvyU,6799
+ sempy_labs/_generate_semantic_model.py,sha256=ktyPjP0BxE-keQm_VB9siOEGcmKc4_EZRlIKopJFeOM,14476
+ sempy_labs/_git.py,sha256=LrerJoooP9z5uUPR_nKyLHxxuZQWguSale8Tuo9SF_M,13163
+ sempy_labs/_helper_functions.py,sha256=zOctuWIPfUunBRGmAQ0W-lyQgliEJtpsg0D8pqYTo-4,37788
  sempy_labs/_icons.py,sha256=ez2dx_LCti71S_-eB6WYQ-kOMyiBL8ZJN12-ev5dcmA,3579
- sempy_labs/_kql_databases.py,sha256=262dcVE0PyqyWWhxWuFBw2VBFo9VzOjNFXGdVioJzww,4618
- sempy_labs/_kql_querysets.py,sha256=ggXNbwiiWYTaF2DzPBzQ8O0mHMYlLO2GxFYQ9W5Hdp8,4154
+ sempy_labs/_kql_databases.py,sha256=oNX9oKnXu5SLkzl4kTMQguh4In-i-0Forcoy1phOe1s,4621
+ sempy_labs/_kql_querysets.py,sha256=A-79LiLBKxlADPTuSK9ipy_LjXKcsJZwQHknUXFpVl0,4157
  sempy_labs/_list_functions.py,sha256=5bjRee8aAhW-ddSQNDmwOdQwWTrDSBR1uRZjt1wQLQ4,55426
  sempy_labs/_managed_private_endpoints.py,sha256=bCuC9V4yMFBw1BNlsoxARdIEMPAySW-ljHrhvuziQfw,6179
- sempy_labs/_mirrored_warehouses.py,sha256=Gsf4KE4cMEJjIb1F1aVAG7lnvmKJ4CjDjRUDraOHd6k,1763
- sempy_labs/_ml_experiments.py,sha256=4oA5hitmXjRx-4jsL_0bxn2ehBrmNVZg_rrt1OQXCb8,4193
- sempy_labs/_ml_models.py,sha256=8tRlsWG4zfKBOSbotjcbChrj2Vv5xC9U-yFpI2RROLc,4044
+ sempy_labs/_mirrored_databases.py,sha256=5_5phu50KIvhHNQJ-RQAxd92W4D7GUVMyjAnOb7ZY3Q,14360
+ sempy_labs/_mirrored_warehouses.py,sha256=t2fBH5L0UzNahDB4lATDLvmCqYTU-V93_ZVLb5ZISSg,1764
+ sempy_labs/_ml_experiments.py,sha256=UVh3cwNvpY-otCBIaKW-sgtzyjwAuu8qJDLhZGBHToE,4196
+ sempy_labs/_ml_models.py,sha256=phYLySjN7MO2YYfq7ZQKMS6w18G6L1-7DdNWB4fcLjQ,4044
  sempy_labs/_model_auto_build.py,sha256=-qDwmFx3KMuIaaUU8CzmCX7CF7ZUVWWu-_GH2UhAU_8,5045
- sempy_labs/_model_bpa.py,sha256=EGbBLD28vx1TCf4cg_lZ2fVCZsCZb9UD-PB6gTA_ndE,20589
- sempy_labs/_model_bpa_bulk.py,sha256=KbXFZDFGqmBKkp_-delfFo6R6KI2rgz_13-ZHPeF6Bc,14782
+ sempy_labs/_model_bpa.py,sha256=So6-33PiogMBXwtt_drPsBDNVBp7rXJm-caADuXKseE,22268
+ sempy_labs/_model_bpa_bulk.py,sha256=W7BOOI-sgMTIoKr8_NnhJswPK4t9x79hdP5kTVYpdkY,15690
  sempy_labs/_model_bpa_rules.py,sha256=96_GkXQGhON-_uyUATgUibk4W9y7e9wl1QciUr96gIQ,45544
- sempy_labs/_model_dependencies.py,sha256=rFPVDA6gLKxy2rDPtHHIvVJF9SmJz4xRe4n922bzWtA,13452
- sempy_labs/_notebooks.py,sha256=5f7NkilmTuy6nBhAqYxB8PFfPIQnngz0IeouoPC0Ioo,7421
+ sempy_labs/_model_dependencies.py,sha256=iQLDiwvClmaV6fHH0nRGksHG_GBmJK3NW4stIitmCSQ,11647
+ sempy_labs/_notebooks.py,sha256=EUYVeRJrCL9IllQevwRxzkCUU-rzX6KEEH7x7mBYUqc,7422
  sempy_labs/_one_lake_integration.py,sha256=eIuLxlw8eXfUH2avKhsyLmXZbTllSwGsz2j_HMAikpQ,6234
- sempy_labs/_query_scale_out.py,sha256=ZURKY4SWvhroYyisrcI9vBgxiXiYhGpu86POYlsSd-o,15704
- sempy_labs/_refresh_semantic_model.py,sha256=ycgjOQU43fOb_ws8WUGxHHTCswxOXObM1PEtUhNisq8,16761
- sempy_labs/_spark.py,sha256=vZIEERtig3COPhFrPwcMDG9G9xc4tyggubj_HrB6FVs,20103
+ sempy_labs/_query_scale_out.py,sha256=xoHnuDUgPYsg-NlUplB9ieb0bClcBQeG4veJNo_4TNw,15708
+ sempy_labs/_refresh_semantic_model.py,sha256=SnakEoE04yVvihK4NW3Kx4csWMuYCZfp9oFM7-KNgVU,16795
+ sempy_labs/_spark.py,sha256=RIJt9b_l5Sp5XrebhvRD0DEBKDTQdA8Rh7fByV27ngQ,20109
  sempy_labs/_sql.py,sha256=KttKi95iGxTT8UA1QOpT9ygAdwCfHHlcQSQ5d9gml0E,5358
  sempy_labs/_translations.py,sha256=2DpP--U3d8Gp7o9LF-OnZa10onN2unvqSHVQHv3CBZg,19838
- sempy_labs/_vertipaq.py,sha256=dapyk0X76FbP5CIn0bZp7H0YgM99IrYpLjnVOliDv5w,36995
- sempy_labs/_warehouses.py,sha256=rf_UWyXx9Zyyr7n23YjP1WKe3uP1zB0eVUr1OH9bGgw,7202
+ sempy_labs/_vertipaq.py,sha256=sS9wFPxZfr_5dsOIXd-oeQIeCyXkVeCHbp30Kd7raUU,37662
+ sempy_labs/_warehouses.py,sha256=KI7Ww5Slw4jfhby4ensGVlDHLWq6u2SvdMCa2R9i778,7205
  sempy_labs/_workloads.py,sha256=x3dS2mOkrS9rA-p70z8849DZlMIvMbzTjMzO_YmnHRg,4449
- sempy_labs/_workspace_identity.py,sha256=WQx6jcRlAsdoro_SYWgwACe_Cw9aN8JwWTc0LL-bZzY,2259
- sempy_labs/_workspaces.py,sha256=lKVPIkmSiT2XGo24zLE3Pcw7jczgKe86vyWOGjC3ZVQ,11193
+ sempy_labs/_workspace_identity.py,sha256=d5cdiiqjyUVoSoDiqU8mzWYOvbt2oJrt7sm-ZGEEkDk,2261
+ sempy_labs/_workspaces.py,sha256=YdLuKBWBqvzYoGZSkWeAce-XxardzjpnpiaCMiE1aGI,11200
  sempy_labs/_bpa_translation/_model/_translations_am-ET.po,sha256=zQVjJ-t0vtgIYan-HaXtUVJLB_PJvB53Nf5BNoOReU4,39199
  sempy_labs/_bpa_translation/_model/_translations_ar-AE.po,sha256=QP1PjDLFccLDs9zq456crdAST57wrcWVk5rRiqqoCws,36959
  sempy_labs/_bpa_translation/_model/_translations_bg-BG.po,sha256=sqezjpS3wfk09WD7x27bHoCBtgmqeHtyHNKTwG7-bkI,44132
@@ -79,9 +80,9 @@ sempy_labs/_bpa_translation/_model/_translations_tr-TR.po,sha256=NdW-X4E0QmeLKM0
  sempy_labs/_bpa_translation/_model/_translations_uk-UA.po,sha256=3NsFN8hoor_5L6738FjpJ8o4spwp8FNFGbVItHD-_ec,43500
  sempy_labs/_bpa_translation/_model/_translations_zh-CN.po,sha256=ipMbnet7ZI5mZoC8KonYKVwGmFLHFB_9KIDOoBgSNfo,26815
  sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po,sha256=5v6tVKGruqneAeMoa6F3tyg_JBL8qOpqOJofWpq2W3U,31518
- sempy_labs/admin/__init__.py,sha256=m0INXWa0DcVq1tTGWbQAOYeFOaFg435eWmo_uHk59b0,1499
- sempy_labs/admin/_basic_functions.py,sha256=F6jyO4XSfZ-5bjV1YvjN371113NKbI_ma3c1F9Xbm3A,35577
- sempy_labs/admin/_domains.py,sha256=r4qKHZsDMZV1dg9qprniVCX0WrJJ1KTTuiqvFYUxYHA,12347
+ sempy_labs/admin/__init__.py,sha256=4TdPPh5SW3cBrtZXnTsiFQnOir5nwP5oU4kX9m5vINA,1561
+ sempy_labs/admin/_basic_functions.py,sha256=ITCDRd45ygorwj74rZr5WoV6-6-OeCJ9Jejp6UMr6uY,37181
+ sempy_labs/admin/_domains.py,sha256=tVjUiV4bLdVKl665ouYGfzYPFsRhPwYHYy7efCncvsE,12337
  sempy_labs/directlake/__init__.py,sha256=etaj-3wqe5t93mu74tGYjEOQ6gtHWUogidOygiVvlq8,2131
  sempy_labs/directlake/_directlake_schema_compare.py,sha256=ocHFU6E6HSKgcNLywGM0dx0ie9AXYwk-E7o7EYcqiN4,4422
  sempy_labs/directlake/_directlake_schema_sync.py,sha256=fhh6Xjd42HjI5x_Ejwq1N4qqnXQsKpXmyPcYl7cNG6A,4151
@@ -93,15 +94,15 @@ sempy_labs/directlake/_guardrails.py,sha256=elPIrqBy7gX7ZhVC7dBqhuFohET9QX9cCX1H
  sempy_labs/directlake/_list_directlake_model_calc_tables.py,sha256=_rpnbgsFAz2W16PpgIOB0Rj_Fs1ZKrDbz3DUaaR_bfU,2143
  sempy_labs/directlake/_show_unsupported_directlake_objects.py,sha256=xm6bih0mbYfWP6ca8BKMZobfS4hhcHNFIhR5I6sNpgw,3181
  sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=X-eNLMhAftsdfs9OOqRcB3tM5X8j9zrN1JqYXpntc2U,5843
- sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=MBxHr87cPiRNczU3HQnKvN2HDQGdv0GqWQw7INWVgkY,7607
- sempy_labs/directlake/_warm_cache.py,sha256=X4R2_i4jFnFXYmwhDIxPb9h1CdPVLzFKHItsz0QpeRg,8248
+ sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=Z67WolTLIrflLFBvRuhmE_MxYGhSBQCFvjqojz6yavw,7614
+ sempy_labs/directlake/_warm_cache.py,sha256=b7XvH74nQrEoraOflrXs4-fVuiLtRrmsQI35TBnYEqs,8307
  sempy_labs/lakehouse/__init__.py,sha256=6LVQltQ3cjyiuxvjXTuNdJ163zSqi4h_tEZY4zsxuSw,647
  sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=Bb_iCTlNwl0wdN4dW_E7tVnfbHhHwQT_l0SUqvcbYpo,2582
- sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=TV_Ch6BBs0LmrHU1eCTa_HfqwwXTx0GXY2Klxjpx5T4,9009
+ sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=zizPKtwDojIN6wbPhrQOUL6-_-kq8zqfbsT8BmS94i8,9010
  sempy_labs/lakehouse/_lakehouse.py,sha256=_yn0ySUrJQD9nySa3gFpEGr6AvF-vOKIMNJruotfxHQ,5224
- sempy_labs/lakehouse/_shortcuts.py,sha256=MrmiPuCC3mAb5K7-XFTY5YK1A8nr9tw--nyaAlFBN6A,7335
+ sempy_labs/lakehouse/_shortcuts.py,sha256=tHkeLcSOiDv36XFysyiQBSKsq9-ohb20vXziU-lf0G4,7337
  sempy_labs/migration/__init__.py,sha256=w4vvGk6wTWXVfofJDmio2yIFvSSJsxOpjv6mvNGmrOI,1043
- sempy_labs/migration/_create_pqt_file.py,sha256=4u5cod8Q0IDMoNicUAzNBfAVlE9OSd3mHJcSSRCIdgQ,9246
+ sempy_labs/migration/_create_pqt_file.py,sha256=RgRo1lG9lex9Xwe5dwSJ2udHiE7w0N_a5BilS2cX6xo,9320
  sempy_labs/migration/_migrate_calctables_to_lakehouse.py,sha256=5fSZHylS8yLmk7maYDsvKbOqB9unxT4sQZq5HxBZwQY,17969
  sempy_labs/migration/_migrate_calctables_to_semantic_model.py,sha256=yNSx126ru7-mUXI3iISwmw8a5jRCwcWw4l5eMXldrcU,6253
  sempy_labs/migration/_migrate_model_objects_to_semantic_model.py,sha256=8IN45AKxic3_4yrC-N5rqWMzO6bCLF9qoyWxbxrvBHc,22900
@@ -109,16 +110,17 @@ sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py,sha256=6B50BkJ
  sempy_labs/migration/_migration_validation.py,sha256=pl5Yv4BwRHZfAL0p2soj_Gk8FL3UcwiqKbX23pJe1oQ,2788
  sempy_labs/migration/_refresh_calc_tables.py,sha256=gUFssZ5vyN4OmViLYkBNkLmYscvQTk8RBZ-i_lrCJYU,5239
  sempy_labs/report/_BPAReportTemplate.json,sha256=9Uh-7E6d2ooxQ7j5JRayv_ayEULc7Gzg42kZGKdOqH8,63920
- sempy_labs/report/__init__.py,sha256=fUSbHtFQS2dz3TUhEsZ_rsA9ntKkZYjRhkdgbvPT3pI,1158
- sempy_labs/report/_generate_report.py,sha256=SQKWQdrNIgHDz-wE2ZYcL15GWe8UeQYV4BKV__RN7aE,12557
+ sempy_labs/report/__init__.py,sha256=esE_i1lL2AdcwWs7cE8AKSXZy_7w_4jJJtFULFkDPcU,1244
+ sempy_labs/report/_download_report.py,sha256=C3jsUJ59L5iv8H9NJWVlIuMUrr66VCnVG4xJJ-hACXs,2772
+ sempy_labs/report/_generate_report.py,sha256=7MbGqXXRS4l76v3rAZgBMMU4j3QEFM9Lxz8ko5yjEcE,12560
  sempy_labs/report/_paginated.py,sha256=-u0vV6byPOBWUOmeBtjPCTmarymucoRv_DvXA54FIHY,2281
  sempy_labs/report/_report_bpa.py,sha256=pvj6BdSALuMTB-nl9L-TOKaTXqBpIr3CqkJhdXd0YSQ,13570
  sempy_labs/report/_report_bpa_rules.py,sha256=tPVGA0hmE6QMLlWtig7Va7Ksr2yXWl_Lndq--tWWd6w,4959
- sempy_labs/report/_report_functions.py,sha256=Dy7fHZiIkuib_3bOTGQNw90aL_z2Vzle1TKeQiH44IA,30096
+ sempy_labs/report/_report_functions.py,sha256=nKqsVsjGrv8TUXsBXpb5ejEopAaFELc7YzhGerJUTBI,30099
  sempy_labs/report/_report_helper.py,sha256=fkSo5m3_KlAlo-fu8FTnxINigWbZI66ex-r44WalKsw,8711
  sempy_labs/report/_report_list_functions.py,sha256=4k-bPEi4uW_ozHTZNc_67idP1LQZPT-gO9ToRCv06fk,3127
- sempy_labs/report/_report_rebind.py,sha256=Donwfr4A7fLvE-nVI1rpKJTJxyhHuxaowTbhJLLfxtM,5187
- sempy_labs/report/_reportwrapper.py,sha256=Ud88HmUmWLfG5CgHXSXBraOOea_DVvP3z_cl_hqDOTE,75724
+ sempy_labs/report/_report_rebind.py,sha256=GbOfEb9qz4SdXVGopiWSkGMDKnneJxd7wx4_OWKZ1Js,5188
+ sempy_labs/report/_reportwrapper.py,sha256=f5nVYKHqUjqoTBLcwZbNlH9YBZlWqadpfkLJYvJ2WNg,75780
  sempy_labs/report/_bpareporttemplate/.platform,sha256=kWRa6B_KwSYLsvVFDx372mQriQO8v7dJ_YzQV_cfD-Q,303
  sempy_labs/report/_bpareporttemplate/definition.pbir,sha256=bttyHZYKqjA8OBb_cezGlX4H82cDvGZVCl1QB3fij4E,343
  sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json,sha256=kzjBlNdjbsSBBSHBwbQc298AJCr9Vp6Ex0D5PemUuT0,1578
@@ -149,9 +151,9 @@ sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visua
  sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json,sha256=wBVuNc8S2NaUA0FC708w6stmR2djNZp8nAsHMqesgsc,293
  sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json,sha256=mlY6t9OlSe-Y6_QmXJpS1vggU6Y3FjISUKECL8FVSg8,931
  sempy_labs/tom/__init__.py,sha256=Qbs8leW0fjzvWwOjyWK3Hjeehu7IvpB1beASGsi28bk,121
- sempy_labs/tom/_model.py,sha256=bRMa1a1wa63Z_1FrK_zdy4mWRZyytqIgI-4OQde2NLY,170149
- semantic_link_labs-0.8.4.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
- semantic_link_labs-0.8.4.dist-info/METADATA,sha256=ZGsId2oI83I9_a3P1umcYeuSWpc8DFPLjncNJ3Le6XY,18910
- semantic_link_labs-0.8.4.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
- semantic_link_labs-0.8.4.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
- semantic_link_labs-0.8.4.dist-info/RECORD,,
+ sempy_labs/tom/_model.py,sha256=gW0uR-Fx8zNkaIZS6wUTtEJeYQjMsbcaSft9lv9SGGA,172011
+ semantic_link_labs-0.8.5.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+ semantic_link_labs-0.8.5.dist-info/METADATA,sha256=K5_wjyzRaw-p85GzF4laagXYMFuSIxR0ypuXx6nadno,19241
+ semantic_link_labs-0.8.5.dist-info/WHEEL,sha256=R06PA3UVYHThwHvxuRWMqaGcr-PuniXahwjmQRFMEkY,91
+ semantic_link_labs-0.8.5.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
+ semantic_link_labs-0.8.5.dist-info/RECORD,,

{semantic_link_labs-0.8.4.dist-info → semantic_link_labs-0.8.5.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.3.0)
+ Generator: setuptools (75.5.0)
  Root-Is-Purelib: true
  Tag: py3-none-any


sempy_labs/__init__.py CHANGED
@@ -1,3 +1,14 @@
+ from sempy_labs._mirrored_databases import (
+     get_mirrored_database_definition,
+     get_mirroring_status,
+     list_mirrored_databases,
+     stop_mirroring,
+     start_mirroring,
+     create_mirrored_database,
+     delete_mirrored_database,
+     update_mirrored_database_definition,
+     get_tables_mirroring_status,
+ )
  from sempy_labs._managed_private_endpoints import (
      list_managed_private_endpoints,
      create_managed_private_endpoint,
@@ -140,6 +151,7 @@ from sempy_labs._dataflows import (
      list_dataflow_storage_accounts,
      assign_workspace_to_dataflow_storage,
      list_dataflows,
+     list_upstream_dataflows,
  )
  from sempy_labs._connections import (
      list_connections,
@@ -148,7 +160,11 @@ from sempy_labs._connections import (
      # create_connection_vnet,
      # create_connection_on_prem
  )
- from sempy_labs._dax import evaluate_dax_impersonation
+ from sempy_labs._dax import (
+     evaluate_dax_impersonation,
+     get_dax_query_dependencies,
+     get_dax_query_memory_size,
+ )
  from sempy_labs._generate_semantic_model import (
      create_blank_semantic_model,
      create_semantic_model_from_bim,
@@ -397,4 +413,16 @@ __all__ = [
      "list_managed_private_endpoints",
      "create_managed_private_endpoint",
      "delete_managed_private_endpoint",
+     "get_dax_query_dependencies",
+     "get_dax_query_memory_size",
+     "get_mirrored_database_definition",
+     "get_mirroring_status",
+     "list_mirrored_databases",
+     "stop_mirroring",
+     "start_mirroring",
+     "create_mirrored_database",
+     "delete_mirrored_database",
+     "update_mirrored_database_definition",
+     "get_tables_mirroring_status",
+     "list_upstream_dataflows",
  ]
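
The mirrored-database, DAX-dependency, and upstream-dataflow helpers are now re-exported at the package root, as the additions to __all__ above show. A minimal sketch using only names added in this diff:

    from sempy_labs import (
        list_upstream_dataflows,
        get_dax_query_dependencies,
        get_dax_query_memory_size,
        list_mirrored_databases,
        get_mirroring_status,
    )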

sempy_labs/_data_pipelines.py CHANGED
@@ -15,7 +15,7 @@ def list_data_pipelines(workspace: Optional[str] = None) -> pd.DataFrame:
      """
      Shows the data pipelines within a workspace.

-     This is a wrapper function for the following API: `Items - List Data Pipelines <https://learn.microsoft.com/rest/api/fabric/datapipeline/items/list-data-pipelines`_.
+     This is a wrapper function for the following API: `Items - List Data Pipelines <https://learn.microsoft.com/rest/api/fabric/datapipeline/items/list-data-pipelines>`_.

      Parameters
      ----------
@@ -59,7 +59,7 @@ def create_data_pipeline(
      """
      Creates a Fabric data pipeline.

-     This is a wrapper function for the following API: `Items - Create Data Pipeline <https://learn.microsoft.com/rest/api/fabric/datapipeline/items/create-data-pipeline`_.
+     This is a wrapper function for the following API: `Items - Create Data Pipeline <https://learn.microsoft.com/rest/api/fabric/datapipeline/items/create-data-pipeline>`_.

      Parameters
      ----------
@@ -96,7 +96,7 @@ def delete_data_pipeline(name: str, workspace: Optional[str] = None):
      """
      Deletes a Fabric data pipeline.

-     This is a wrapper function for the following API: `Items - Delete Data Pipeline <https://learn.microsoft.com/rest/api/fabric/datapipeline/items/delete-data-pipeline`_.
+     This is a wrapper function for the following API: `Items - Delete Data Pipeline <https://learn.microsoft.com/rest/api/fabric/datapipeline/items/delete-data-pipeline>`_.

      Parameters
      ----------
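
The only change in this module is the corrected reStructuredText link markup in the three docstrings; the wrapped Fabric REST APIs are unchanged. A hedged usage sketch of the listing helper, assuming the top-level re-export carried over from earlier releases:

    from sempy_labs import list_data_pipelines

    # Returns a pandas DataFrame of data pipelines; workspace=None resolves to the current workspace.
    df = list_data_pipelines(workspace=None)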

sempy_labs/_dataflows.py CHANGED
@@ -2,10 +2,12 @@ import sempy.fabric as fabric
  import pandas as pd
  from sempy_labs._helper_functions import (
      resolve_workspace_name_and_id,
+     _is_valid_uuid,
  )
- from typing import Optional
+ from typing import Optional, Tuple
  import sempy_labs._icons as icons
  from sempy.fabric.exceptions import FabricHTTPException
+ from uuid import UUID


  def list_dataflows(workspace: Optional[str] = None):
@@ -59,7 +61,7 @@ def assign_workspace_to_dataflow_storage(
      """
      Assigns a dataflow storage account to a workspace.

-     This is a wrapper function for the following API: `Dataflow Storage Accounts - Groups AssignToDataflowStorage <https://learn.microsoft.com/rest/api/power-bi/dataflow-storage-accounts/groups-assign-to-dataflow-storage`_.
+     This is a wrapper function for the following API: `Dataflow Storage Accounts - Groups AssignToDataflowStorage <https://learn.microsoft.com/rest/api/power-bi/dataflow-storage-accounts/groups-assign-to-dataflow-storage>`_.

      Parameters
      ----------
@@ -101,7 +103,7 @@ def list_dataflow_storage_accounts() -> pd.DataFrame:
      """
      Shows the accessible dataflow storage accounts.

-     This is a wrapper function for the following API: `Dataflow Storage Accounts - Get Dataflow Storage Accounts <https://learn.microsoft.com/rest/api/power-bi/dataflow-storage-accounts/get-dataflow-storage-accounts`_.
+     This is a wrapper function for the following API: `Dataflow Storage Accounts - Get Dataflow Storage Accounts <https://learn.microsoft.com/rest/api/power-bi/dataflow-storage-accounts/get-dataflow-storage-accounts>`_.

      Returns
      -------
@@ -132,3 +134,114 @@
      df["Enabled"] = df["Enabled"].astype(bool)

      return df
+
+
+ def list_upstream_dataflows(
+     dataflow: str | UUID, workspace: Optional[str] = None
+ ) -> pd.DataFrame:
+     """
+     Shows a list of upstream dataflows for the specified dataflow.
+
+     This is a wrapper function for the following API: `Dataflows - Get Upstream Dataflows In Group <https://learn.microsoft.com/rest/api/power-bi/dataflows/get-upstream-dataflows-in-group>`_.
+
+     Parameters
+     ----------
+     dataflow : str | UUID
+         Name or UUID of the dataflow.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing a list of upstream dataflows for the specified dataflow.
+     """
+
+     workspace_name = fabric.resolve_workspace_name(workspace)
+     workspace_id = fabric.resolve_workspace_id(workspace)
+     (dataflow_name, dataflow_id) = _resolve_dataflow_name_and_id(
+         dataflow=dataflow, workspace=workspace
+     )
+     client = fabric.PowerBIRestClient()
+
+     df = pd.DataFrame(
+         columns=[
+             "Dataflow Name",
+             "Dataflow Id",
+             "Workspace Name",
+             "Workspace Id",
+             "Upstream Dataflow Name",
+             "Upstream Dataflow Id",
+             "Upstream Workspace Name",
+             "Upstream Workspace Id",
+         ]
+     )
+
+     def collect_upstreams(
+         client, dataflow_id, dataflow_name, workspace_id, workspace_name
+     ):
+         response = client.get(
+             f"/v1.0/myorg/groups/{workspace_id}/dataflows/{dataflow_id}/upstreamDataflows"
+         )
+         if response.status_code != 200:
+             raise FabricHTTPException(response)
+
+         values = response.json().get("value", [])
+         for v in values:
+             tgt_dataflow_id = v.get("targetDataflowId")
+             tgt_workspace_id = v.get("groupId")
+             tgt_workspace_name = fabric.resolve_workspace_name(tgt_workspace_id)
+             (tgt_dataflow_name, _) = _resolve_dataflow_name_and_id(
+                 dataflow=tgt_dataflow_id, workspace=tgt_workspace_name
+             )
+
+             df.loc[len(df)] = {
+                 "Dataflow Name": dataflow_name,
+                 "Dataflow Id": dataflow_id,
+                 "Workspace Name": workspace_name,
+                 "Workspace Id": workspace_id,
+                 "Upstream Dataflow Name": tgt_dataflow_name,
+                 "Upstream Dataflow Id": tgt_dataflow_id,
+                 "Upstream Workspace Name": tgt_workspace_name,
+                 "Upstream Workspace Id": tgt_workspace_id,
+             }
+
+             collect_upstreams(
+                 client,
+                 tgt_dataflow_id,
+                 tgt_dataflow_name,
+                 tgt_workspace_id,
+                 tgt_workspace_name,
+             )
+
+     collect_upstreams(client, dataflow_id, dataflow_name, workspace_id, workspace_name)
+
+     return df
+
+
+ def _resolve_dataflow_name_and_id(
+     dataflow: str | UUID, workspace: Optional[str] = None
+ ) -> Tuple[str, UUID]:
+
+     if workspace is None:
+         workspace = fabric.resolve_workspace_name(workspace)
+
+     dfD = list_dataflows(workspace=workspace)
+
+     if _is_valid_uuid(dataflow):
+         dfD_filt = dfD[dfD["Dataflow Id"] == dataflow]
+     else:
+         dfD_filt = dfD[dfD["Dataflow Name"] == dataflow]
+
+     if len(dfD_filt) == 0:
+         raise ValueError(
+             f"{icons.red_dot} The '{dataflow}' dataflow does not exist within the '{workspace}' workspace."
+         )
+
+     dataflow_id = dfD_filt["Dataflow Id"].iloc[0]
+     dataflow_name = dfD_filt["Dataflow Name"].iloc[0]
+
+     return dataflow_name, dataflow_id
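
A hedged usage sketch of the new lineage helper (the dataflow and workspace names below are placeholders; per the signature above, the function accepts either a name or a UUID):

    from sempy_labs import list_upstream_dataflows

    # One row per dataflow -> upstream-dataflow edge, walked recursively via collect_upstreams.
    df = list_upstream_dataflows(dataflow="Sales Dataflow", workspace="Finance")
    print(df[["Dataflow Name", "Upstream Dataflow Name", "Upstream Workspace Name"]])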

sempy_labs/_dax.py CHANGED
@@ -3,9 +3,12 @@ import pandas as pd
  from sempy_labs._helper_functions import (
      resolve_dataset_id,
      resolve_workspace_name_and_id,
+     format_dax_object_name,
  )
+ from sempy_labs._model_dependencies import get_model_calc_dependencies
  from typing import Optional
  from sempy._utils._log import log
+ from tqdm.auto import tqdm


  @log
@@ -40,10 +43,7 @@
          A pandas dataframe holding the result of the DAX query.
      """

-     # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/execute-queries-in-group
-
      (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
      dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)

      request_body = {
@@ -62,3 +62,189 @@
      df = pd.DataFrame(data_rows, columns=column_names)

      return df
+
+
+ @log
+ def get_dax_query_dependencies(
+     dataset: str,
+     dax_string: str,
+     put_in_memory: bool = False,
+     workspace: Optional[str] = None,
+ ) -> pd.DataFrame:
+     """
+     Obtains the columns on which a DAX query depends, including model dependencies. Shows Vertipaq statistics (i.e. Total Size, Data Size, Dictionary Size, Hierarchy Size) for easy prioritizing.
+
+     Parameters
+     ----------
+     dataset : str
+         Name of the semantic model.
+     dax_string : str
+         The DAX query.
+     put_in_memory : bool, default=False
+         If True, ensures that the dependent columns are put into memory in order to give realistic Vertipaq stats (i.e. Total Size etc.).
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the dependent columns of a given DAX query including model dependencies.
+     """
+
+     if workspace is None:
+         workspace = fabric.resolve_workspace_name(workspace)
+
+     # Escape quotes in dax
+     dax_string = dax_string.replace('"', '""')
+     final_query = f"""
+     EVALUATE
+     VAR source_query = "{dax_string}"
+     VAR all_dependencies = SELECTCOLUMNS(
+         INFO.CALCDEPENDENCY("QUERY", source_query),
+         "Referenced Object Type",[REFERENCED_OBJECT_TYPE],
+         "Referenced Table", [REFERENCED_TABLE],
+         "Referenced Object", [REFERENCED_OBJECT]
+     )
+     RETURN all_dependencies
+     """
+     dep = fabric.evaluate_dax(
+         dataset=dataset, workspace=workspace, dax_string=final_query
+     )
+
+     # Clean up column names and values (remove outside square brackets, underscorees in object type)
+     dep.columns = dep.columns.map(lambda x: x[1:-1])
+     dep["Referenced Object Type"] = (
+         dep["Referenced Object Type"].str.replace("_", " ").str.title()
+     )
+     dep
+
+     # Dataframe df will contain the output of all dependencies of the objects used in the query
+     df = dep.copy()
+
+     cd = get_model_calc_dependencies(dataset=dataset, workspace=workspace)
+
+     for _, r in dep.iterrows():
+         ot = r["Referenced Object Type"]
+         object_name = r["Referenced Object"]
+         table_name = r["Referenced Table"]
+         cd_filt = cd[
+             (cd["Object Type"] == ot)
+             & (cd["Object Name"] == object_name)
+             & (cd["Table Name"] == table_name)
+         ]
+
+         # Adds in the dependencies of each object used in the query (i.e. relationship etc.)
+         if len(cd_filt) > 0:
+             subset = cd_filt[
+                 ["Referenced Object Type", "Referenced Table", "Referenced Object"]
+             ]
+             df = pd.concat([df, subset], ignore_index=True)
+
+     df.columns = df.columns.map(lambda x: x.replace("Referenced ", ""))
+     # Remove duplicates
+     df = df.drop_duplicates().reset_index(drop=True)
+     # Only show columns and remove the rownumber column
+     df = df[
+         (df["Object Type"].isin(["Column", "Calc Column"]))
+         & (~df["Object"].str.startswith("RowNumber-"))
+     ]
+
+     # Get vertipaq stats, filter to just the objects in the df dataframe
+     df["Full Object"] = format_dax_object_name(df["Table"], df["Object"])
+     dfC = fabric.list_columns(dataset=dataset, workspace=workspace, extended=True)
+     dfC["Full Object"] = format_dax_object_name(dfC["Table Name"], dfC["Column Name"])
+
+     dfC_filtered = dfC[dfC["Full Object"].isin(df["Full Object"].values)][
+         [
+             "Table Name",
+             "Column Name",
+             "Total Size",
+             "Data Size",
+             "Dictionary Size",
+             "Hierarchy Size",
+             "Is Resident",
+             "Full Object",
+         ]
+     ].reset_index(drop=True)
+
+     if put_in_memory:
+         not_in_memory = dfC_filtered[dfC_filtered["Is Resident"] == False]
+
+         if len(not_in_memory) > 0:
+             tbls = not_in_memory["Table Name"].unique()
+
+             # Run basic query to get columns into memory; completed one table at a time (so as not to overload the capacity)
+             for table_name in (bar := tqdm(tbls)):
+                 bar.set_description(f"Warming the '{table_name}' table...")
+                 css = ", ".join(
+                     not_in_memory[not_in_memory["Table Name"] == table_name][
+                         "Full Object"
+                     ]
+                     .astype(str)
+                     .tolist()
+                 )
+                 dax = f"""EVALUATE TOPN(1,SUMMARIZECOLUMNS({css}))"""
+                 fabric.evaluate_dax(
+                     dataset=dataset, dax_string=dax, workspace=workspace
+                 )
+
+             # Get column stats again
+             dfC = fabric.list_columns(
+                 dataset=dataset, workspace=workspace, extended=True
+             )
+             dfC["Full Object"] = format_dax_object_name(
+                 dfC["Table Name"], dfC["Column Name"]
+             )
+
+             dfC_filtered = dfC[dfC["Full Object"].isin(df["Full Object"].values)][
+                 [
+                     "Table Name",
+                     "Column Name",
+                     "Total Size",
+                     "Data Size",
+                     "Dictionary Size",
+                     "Hierarchy Size",
+                     "Is Resident",
+                     "Full Object",
+                 ]
+             ].reset_index(drop=True)
+
+     dfC_filtered.drop(["Full Object"], axis=1, inplace=True)
+
+     return dfC_filtered
+
+
+ @log
+ def get_dax_query_memory_size(
+     dataset: str, dax_string: str, workspace: Optional[str] = None
+ ) -> int:
+     """
+     Obtains the total size, in bytes, used by all columns that a DAX query depends on.
+
+     Parameters
+     ----------
+     dataset : str
+         Name of the semantic model.
+     dax_string : str
+         The DAX query.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     int
+         The total size, in bytes, used by all columns that the DAX query depends on.
+     """
+
+     if workspace is None:
+         workspace = fabric.resolve_workspace_name(workspace)
+
+     df = get_dax_query_dependencies(
+         dataset=dataset, workspace=workspace, dax_string=dax_string, put_in_memory=True
+     )
+
+     return df["Total Size"].sum()
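
A hedged usage sketch of the two new helpers (the dataset name and the DAX query below are placeholders):

    from sempy_labs import get_dax_query_dependencies, get_dax_query_memory_size

    query = 'EVALUATE SUMMARIZECOLUMNS(Sales[Color], "Amount", SUM(Sales[Amount]))'

    # Columns the query touches, directly and via model dependencies, with Vertipaq size stats.
    deps = get_dax_query_dependencies(dataset="Adventure Works", dax_string=query)

    # Total bytes across those columns; this helper calls the above with put_in_memory=True first.
    size = get_dax_query_memory_size(dataset="Adventure Works", dax_string=query)
    print(f"{size:,} bytes")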