semantic-link-labs 0.9.3__py3-none-any.whl → 0.9.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of semantic-link-labs might be problematic.

Files changed (68)
  1. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/METADATA +25 -6
  2. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/RECORD +68 -52
  3. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +45 -4
  5. sempy_labs/_capacities.py +22 -127
  6. sempy_labs/_capacity_migration.py +11 -9
  7. sempy_labs/_dashboards.py +60 -0
  8. sempy_labs/_data_pipelines.py +5 -31
  9. sempy_labs/_dax.py +17 -3
  10. sempy_labs/_delta_analyzer.py +279 -127
  11. sempy_labs/_environments.py +20 -48
  12. sempy_labs/_eventhouses.py +69 -30
  13. sempy_labs/_eventstreams.py +16 -34
  14. sempy_labs/_gateways.py +4 -4
  15. sempy_labs/_generate_semantic_model.py +30 -10
  16. sempy_labs/_git.py +90 -1
  17. sempy_labs/_graphQL.py +3 -20
  18. sempy_labs/_helper_functions.py +201 -44
  19. sempy_labs/_job_scheduler.py +226 -2
  20. sempy_labs/_kql_databases.py +19 -34
  21. sempy_labs/_kql_querysets.py +15 -32
  22. sempy_labs/_list_functions.py +14 -133
  23. sempy_labs/_mirrored_databases.py +14 -48
  24. sempy_labs/_ml_experiments.py +5 -30
  25. sempy_labs/_ml_models.py +4 -28
  26. sempy_labs/_model_bpa.py +17 -0
  27. sempy_labs/_model_bpa_rules.py +12 -2
  28. sempy_labs/_mounted_data_factories.py +119 -0
  29. sempy_labs/_notebooks.py +16 -26
  30. sempy_labs/_semantic_models.py +117 -0
  31. sempy_labs/_sql.py +78 -10
  32. sempy_labs/_sqldatabase.py +227 -0
  33. sempy_labs/_utils.py +42 -0
  34. sempy_labs/_vertipaq.py +17 -2
  35. sempy_labs/_warehouses.py +5 -17
  36. sempy_labs/_workloads.py +23 -9
  37. sempy_labs/_workspaces.py +13 -5
  38. sempy_labs/admin/__init__.py +70 -9
  39. sempy_labs/admin/_activities.py +166 -0
  40. sempy_labs/admin/_apps.py +143 -0
  41. sempy_labs/admin/_artifacts.py +62 -0
  42. sempy_labs/admin/_basic_functions.py +32 -704
  43. sempy_labs/admin/_capacities.py +311 -0
  44. sempy_labs/admin/_datasets.py +184 -0
  45. sempy_labs/admin/_domains.py +1 -1
  46. sempy_labs/admin/_items.py +3 -1
  47. sempy_labs/admin/_reports.py +239 -0
  48. sempy_labs/admin/_scanner.py +0 -1
  49. sempy_labs/admin/_shared.py +76 -0
  50. sempy_labs/admin/_tenant.py +489 -0
  51. sempy_labs/admin/_users.py +133 -0
  52. sempy_labs/admin/_workspaces.py +148 -0
  53. sempy_labs/directlake/_dl_helper.py +0 -1
  54. sempy_labs/directlake/_update_directlake_partition_entity.py +14 -0
  55. sempy_labs/graph/_teams.py +1 -1
  56. sempy_labs/graph/_users.py +9 -1
  57. sempy_labs/lakehouse/__init__.py +2 -0
  58. sempy_labs/lakehouse/_lakehouse.py +6 -7
  59. sempy_labs/lakehouse/_shortcuts.py +216 -64
  60. sempy_labs/report/__init__.py +3 -1
  61. sempy_labs/report/_download_report.py +4 -1
  62. sempy_labs/report/_export_report.py +272 -0
  63. sempy_labs/report/_generate_report.py +9 -17
  64. sempy_labs/report/_report_bpa.py +12 -19
  65. sempy_labs/report/_report_functions.py +9 -261
  66. sempy_labs/tom/_model.py +307 -40
  67. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/LICENSE +0 -0
  68. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/top_level.txt +0 -0
sempy_labs/_capacity_migration.py CHANGED
@@ -2,10 +2,11 @@ import sempy.fabric as fabric
 from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons
-from sempy.fabric.exceptions import FabricHTTPException
 from sempy_labs._workspaces import assign_workspace_to_capacity
-from sempy_labs.admin._basic_functions import (
+from sempy_labs.admin import (
     assign_workspaces_to_capacity,
+)
+from sempy_labs.admin._capacities import (
     _list_capacities_meta,
     list_capacities,
 )
@@ -15,6 +16,7 @@ from sempy_labs._helper_functions import (
     _base_api,
 )
 from sempy_labs._capacities import create_fabric_capacity
+from uuid import UUID


 def _migrate_settings(source_capacity: str, target_capacity: str):
@@ -530,7 +532,7 @@ def _migrate_delegated_tenant_settings(source_capacity: str, target_capacity: st


 @log
-def _migrate_spark_settings(source_capacity: str, target_capacity: str):
+def _migrate_spark_settings(source_capacity: str | UUID, target_capacity: str | UUID):
     """
     This function migrates a capacity's spark settings to another capacity.

@@ -538,14 +540,14 @@ def _migrate_spark_settings(source_capacity: str, target_capacity: str):

     Parameters
     ----------
-    source_capacity : str
-        Name of the source capacity.
-    target_capacity : str
-        Name of the target capacity.
+    source_capacity : str | uuid.UUID
+        Name or ID of the source capacity.
+    target_capacity : str | uuid.UUID
+        Name or ID of the target capacity.
     """

-    source_capacity_id = resolve_capacity_id(capacity_name=source_capacity)
-    target_capacity_id = resolve_capacity_id(capacity_name=target_capacity)
+    source_capacity_id = resolve_capacity_id(capacity=source_capacity)
+    target_capacity_id = resolve_capacity_id(capacity=target_capacity)

     # Get source capacity server dns
     response = _base_api(request=f"metadata/capacityInformation/{source_capacity_id}")
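Note the signature change: _migrate_spark_settings now accepts either a capacity name or a UUID, and resolve_capacity_id is called with a generic capacity keyword instead of capacity_name. A minimal sketch of how such a resolver can dispatch on its input, assuming a hypothetical name-to-ID table in place of the library's real capacity listing (this is an illustration, not the actual implementation):

    from uuid import UUID

    # Hypothetical lookup table standing in for the real capacity listing.
    _CAPACITY_IDS = {"MyCapacity": "4de5bcc4-2c88-4efe-b827-4ee7b289b496"}

    def resolve_capacity_id(capacity: str | UUID) -> str:
        # If the input already parses as a UUID, it is the ID; pass it through.
        try:
            return str(UUID(str(capacity)))
        except ValueError:
            pass
        # Otherwise treat it as a display name and look it up.
        try:
            return _CAPACITY_IDS[capacity]
        except KeyError:
            raise ValueError(f"Capacity '{capacity}' not found.")

Accepting str | UUID everywhere lets callers pass IDs straight through without a name lookup, which is why the docstrings above now read "Name or ID".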
sempy_labs/_dashboards.py ADDED
@@ -0,0 +1,60 @@
+from typing import Optional
+from uuid import UUID
+import pandas as pd
+from sempy_labs._helper_functions import (
+    _create_dataframe,
+    _base_api,
+    resolve_workspace_name_and_id,
+    _update_dataframe_datatypes,
+)
+
+
+def list_dashboards(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Shows a list of the dashboards within a workspace.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the dashboards within a workspace.
+    """
+
+    columns = {
+        "Dashboard ID": "string",
+        "Dashboard Name": "string",
+        "Read Only": "bool",
+        "Web URL": "string",
+        "Embed URL": "string",
+        "Data Classification": "string",
+        "Users": "string",
+        "Subscriptions": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    response = _base_api(request=f"/v1.0/myorg/groups/{workspace_id}/dashboards")
+
+    for v in response.json().get("value", []):
+        new_data = {
+            "Dashboard ID": v.get("id"),
+            "Dashboard Name": v.get("displayName"),
+            "Read Only": v.get("isReadOnly"),
+            "Web URL": v.get("webUrl"),
+            "Embed URL": v.get("embedUrl"),
+            "Data Classification": v.get("dataClassification"),
+            "Users": v.get("users"),
+            "Subscriptions": v.get("subscriptions"),
+        }
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
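The new module wraps the Power BI dashboards REST endpoint (GET /v1.0/myorg/groups/{workspace_id}/dashboards) in the library's usual list-function shape: declare a typed column map, build rows from the JSON response, then coerce dtypes. A usage sketch, assuming a Fabric notebook session and assuming the function is re-exported from the package root (the __init__.py changes in this release suggest new exports, but that is an inference):

    import sempy_labs as labs

    # With no argument, the workspace resolves to the attached lakehouse's
    # workspace, or to the notebook's workspace if no lakehouse is attached.
    df = labs.list_dashboards()

    # Or target a workspace explicitly, by name or ID
    # ("Sales Analytics" is a placeholder name).
    df = labs.list_dashboards(workspace="Sales Analytics")
    print(df[["Dashboard Name", "Web URL"]])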
sempy_labs/_data_pipelines.py CHANGED
@@ -1,13 +1,13 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _decode_b64,
     _base_api,
-    _print_success,
     resolve_item_id,
     _create_dataframe,
+    delete_item,
+    create_item,
 )
 from uuid import UUID

@@ -76,25 +76,8 @@ def create_data_pipeline(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/dataPipelines",
-        method="post",
-        payload=payload,
-        status_codes=[201, 202],
-        lro_return_status_code=True,
-    )
-    _print_success(
-        item_name=name,
-        item_type="data pipeline",
-        workspace_name=workspace_name,
-        action="created",
+    create_item(
+        name=name, description=description, type="DataPipeline", workspace=workspace
     )


@@ -114,16 +97,7 @@ def delete_data_pipeline(name: str | UUID, workspace: Optional[str | UUID] = Non
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = resolve_item_id(item=name, type="DataPipeline", workspace=workspace)
-
-    fabric.delete_item(item_id=item_id, workspace=workspace)
-    _print_success(
-        item_name=name,
-        item_type="data pipeline",
-        workspace_name=workspace_name,
-        action="deleted",
-    )
+    delete_item(item=name, type="DataPipeline", workspace=workspace)


 def get_data_pipeline_definition(
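create_data_pipeline and delete_data_pipeline now delegate to the shared create_item and delete_item helpers from _helper_functions, so payload assembly, status handling, and success logging live in one place instead of being repeated per item type. The helper bodies are not part of this diff; the following is a speculative sketch of create_item's shape, inferred only from the call site above and the generic Fabric items endpoint, not the library's actual code:

    from typing import Optional
    from uuid import UUID

    from sempy_labs._helper_functions import (
        resolve_workspace_name_and_id,
        _base_api,
    )

    def create_item(
        name: str,
        type: str,
        description: Optional[str] = None,
        workspace: Optional[str | UUID] = None,
    ):
        # Speculative body; the real helper lives in sempy_labs._helper_functions.
        (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
        payload = {"displayName": name, "type": type}
        if description:
            payload["description"] = description
        # Generic Fabric REST endpoint for creating workspace items of any type.
        _base_api(
            request=f"/v1/workspaces/{workspace_id}/items",
            method="post",
            payload=payload,
            status_codes=[201, 202],
            lro_return_status_code=True,
        )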
sempy_labs/_dax.py CHANGED
@@ -62,9 +62,23 @@ def evaluate_dax_impersonation(
         payload=payload,
     )
     data = response.json()["results"][0]["tables"]
-    column_names = data[0]["rows"][0].keys()
-    data_rows = [row.values() for item in data for row in item["rows"]]
-    df = pd.DataFrame(data_rows, columns=column_names)
+
+    # Get all possible column names from all rows because null columns aren't returned
+    all_columns = set()
+    for item in data:
+        for row in item["rows"]:
+            all_columns.update(row.keys())
+
+    # Create rows with all columns, filling missing values with None
+    rows = []
+    for item in data:
+        for row in item["rows"]:
+            # Create a new row with all columns, defaulting to None
+            new_row = {col: row.get(col) for col in all_columns}
+            rows.append(new_row)
+
+    # Create DataFrame from the processed rows
+    df = pd.DataFrame(rows)

     return df