msfabricpysdkcore 0.2.6__py3-none-any.whl → 0.2.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. msfabricpysdkcore/coreapi.py +745 -21
  2. msfabricpysdkcore/otheritems.py +61 -1
  3. msfabricpysdkcore/tests/test_admin_apis.py +2 -2
  4. msfabricpysdkcore/tests/test_apache_airflow_job.py +60 -0
  5. msfabricpysdkcore/tests/test_digital_twin_builder.py +60 -0
  6. msfabricpysdkcore/tests/test_domains.py +11 -18
  7. msfabricpysdkcore/tests/test_environments.py +34 -27
  8. msfabricpysdkcore/tests/test_evenstreams.py +31 -26
  9. msfabricpysdkcore/tests/test_eventhouses.py +63 -0
  10. msfabricpysdkcore/tests/test_external_data_shares.py +2 -2
  11. msfabricpysdkcore/tests/test_fabric_azure_client.py +1 -1
  12. msfabricpysdkcore/tests/test_folders.py +9 -6
  13. msfabricpysdkcore/tests/test_gateways.py +3 -3
  14. msfabricpysdkcore/tests/test_git.py +1 -1
  15. msfabricpysdkcore/tests/test_items.py +2 -2
  16. msfabricpysdkcore/tests/test_jobs.py +6 -4
  17. msfabricpysdkcore/tests/test_kql_dashboards.py +3 -3
  18. msfabricpysdkcore/tests/test_kql_queryset.py +1 -5
  19. msfabricpysdkcore/tests/test_kqldatabases.py +3 -3
  20. msfabricpysdkcore/tests/test_lakehouse.py +6 -10
  21. msfabricpysdkcore/tests/test_managed_private_endpoints.py +13 -12
  22. msfabricpysdkcore/tests/test_mirrored_azuredatabricks_catalog.py +81 -0
  23. msfabricpysdkcore/tests/test_ml_experiments.py +1 -3
  24. msfabricpysdkcore/tests/test_ml_models.py +2 -3
  25. msfabricpysdkcore/tests/test_mounted_adf.py +1 -1
  26. msfabricpysdkcore/tests/test_notebooks.py +3 -2
  27. msfabricpysdkcore/tests/test_one_lake_data_access_security.py +2 -2
  28. msfabricpysdkcore/tests/test_other_items.py +2 -2
  29. msfabricpysdkcore/tests/test_reflex.py +1 -2
  30. msfabricpysdkcore/tests/test_semantic_model.py +1 -1
  31. msfabricpysdkcore/tests/test_shortcuts.py +3 -4
  32. msfabricpysdkcore/tests/test_spark.py +2 -2
  33. msfabricpysdkcore/tests/test_sparkjobdefinition.py +1 -1
  34. msfabricpysdkcore/tests/test_sql_endpoint.py +28 -0
  35. msfabricpysdkcore/tests/test_warehouses.py +1 -1
  36. msfabricpysdkcore/tests/test_workspaces_capacities.py +8 -8
  37. msfabricpysdkcore/workspace.py +180 -10
  38. {msfabricpysdkcore-0.2.6.dist-info → msfabricpysdkcore-0.2.8.dist-info}/METADATA +4 -2
  39. msfabricpysdkcore-0.2.8.dist-info/RECORD +78 -0
  40. msfabricpysdkcore/tests/test_deployment_pipeline.py +0 -63
  41. msfabricpysdkcore/tests/test_evenhouses.py +0 -56
  42. msfabricpysdkcore-0.2.6.dist-info/RECORD +0 -75
  43. {msfabricpysdkcore-0.2.6.dist-info → msfabricpysdkcore-0.2.8.dist-info}/WHEEL +0 -0
  44. {msfabricpysdkcore-0.2.6.dist-info → msfabricpysdkcore-0.2.8.dist-info}/licenses/LICENSE +0 -0
  45. {msfabricpysdkcore-0.2.6.dist-info → msfabricpysdkcore-0.2.8.dist-info}/top_level.txt +0 -0
msfabricpysdkcore/otheritems.py
@@ -1,5 +1,15 @@
 from msfabricpysdkcore.item import Item
 
+class ApacheAirflowJob(Item):
+    """Class to represent a ApacheAirflowJob in Microsoft Fabric"""
+
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
+
+    def from_dict(item_dict, core_client):
+        return ApacheAirflowJob(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+            properties=item_dict.get('properties', None),
+            definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
 
 class CopyJob(Item):
     """Class to represent a copy job in Microsoft Fabric"""
@@ -36,7 +46,44 @@ class DataPipeline(Item):
 
     def run_on_demand_item_job(self, execution_data=None):
         return self.core_client.run_on_demand_item_job(workspace_id=self.workspace_id, item_id=self.id, job_type="Pipeline", execution_data=execution_data)
+
+class DigitalTwinBuilder(Item):
+    """Class to represent a DigitalTwinBuilder in Microsoft Fabric"""
+
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
+
+    def from_dict(item_dict, core_client):
+        return DigitalTwinBuilder(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+            properties=item_dict.get('properties', None),
+            definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
+
+class DigitalTwinBuilderFlow(Item):
+    """Class to represent a DigitalTwinBuilderFlow in Microsoft Fabric"""
+
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
+
+    def from_dict(item_dict, core_client):
+        return DigitalTwinBuilderFlow(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+            properties=item_dict.get('properties', None),
+            definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
+
+class MirroredAzureDatabricksCatalog(Item):
+    """Class to represent a mirrored Azure Databricks catalog in Microsoft Fabric"""
+
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
+
+    def from_dict(item_dict, core_client):
+        return MirroredAzureDatabricksCatalog(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+            properties=item_dict.get('properties', None),
+            definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
 
+    def refresh_catalog_metadata(self):
+        """Method to refresh the metadata of the mirrored Azure Databricks catalog"""
+        return self.core_client.refresh_mirrored_azure_databricks_catalog_metadata(self.workspace_id, self.id)
+
 class Eventhouse(Item):
     """Class to represent a eventhouse in Microsoft Fabric"""
 
@@ -110,6 +157,17 @@ class Warehouse(Item):
         return Warehouse(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
             properties=item_dict.get('properties', None),
             definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
+
+class WarehouseSnapshot(Item):
+    """Class to represent a warehouse snapshot in Microsoft Fabric"""
+
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
+
+    def from_dict(item_dict, core_client):
+        return WarehouseSnapshot(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+            properties=item_dict.get('properties', None),
+            definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
 
 class KQLDashboard(Item):
     """Class to represent a kql dashboard in Microsoft Fabric"""
@@ -180,7 +238,9 @@ class GraphQLApi(Item):
         return GraphQLApi(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
             properties=item_dict.get('properties', None),
             definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
-
+
+
+
 class MirroredDatabase(Item):
     """Class to represent a mirrored database in Microsoft Fabric"""
 
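The MirroredAzureDatabricksCatalog item added above wraps a core-client call on FabricClientCore. A minimal, hypothetical usage sketch, assuming only the method names visible in this hunk (the workspace and catalog item ids below are placeholders, not values taken from this diff):

from msfabricpysdkcore import FabricClientCore

fcc = FabricClientCore()

# Core-client call wrapped by MirroredAzureDatabricksCatalog.refresh_catalog_metadata() above.
# "<workspace-id>" and "<catalog-item-id>" are placeholder arguments.
resp = fcc.refresh_mirrored_azure_databricks_catalog_metadata("<workspace-id>", "<catalog-item-id>")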
msfabricpysdkcore/tests/test_admin_apis.py
@@ -147,7 +147,7 @@ class TestFabricClientCore(unittest.TestCase):
 
         fca = self.fca
 
-        items = [{"id": "9cdd3192-bcd0-4cbe-b945-29f5964e7ab7", "type": "Lakehouse"}]
+        items = [{"id": "e79d7a0e-1741-4ddf-a705-b861f2775f97", "type": "Lakehouse"}]
         label_id = "defa4170-0d19-0005-0007-bc88714345d2"
         resp = fca.bulk_set_labels(items=items, label_id=label_id)
         self.assertEqual(resp["itemsChangeLabelStatus"][0]["status"], "Succeeded")
@@ -159,7 +159,7 @@ class TestFabricClientCore(unittest.TestCase):
 
 
         data_shares = fca.list_external_data_shares()
-        ws_id = "63aa9e13-4912-4abe-9156-8a56e565b7a3"
+        ws_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
 
         data_shares = [d for d in data_shares if d['workspaceId'] == ws_id]
 
msfabricpysdkcore/tests/test_apache_airflow_job.py
@@ -0,0 +1,60 @@
+import unittest
+from dotenv import load_dotenv
+from msfabricpysdkcore import FabricClientCore
+from datetime import datetime
+load_dotenv()
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        self.fcc = FabricClientCore()
+
+    def test_apache_airflow_job(self):
+        fcc = self.fcc
+
+        workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
+        item_id = "4e685286-d909-4ccb-911f-590ee3c3df14"
+
+        apache_airflow_job = fcc.list_apache_airflow_jobs(workspace_id=workspace_id)
+        for apache_airflow_job in apache_airflow_job:
+            if apache_airflow_job.id != item_id:
+                resp = fcc.delete_apache_airflow_job(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job.id)
+                self.assertEqual(resp, 200)
+
+        apache_airflow_job_definition = fcc.get_apache_airflow_job_definition(workspace_id=workspace_id, apache_airflow_job_id=item_id)
+        self.assertIn("definition", apache_airflow_job_definition)
+        definition = apache_airflow_job_definition["definition"]
+
+        date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
+        date_str = f"copyjob{date_str}"
+
+        apache_airflow_job_new = fcc.create_apache_airflow_job(workspace_id=workspace_id, display_name=date_str, definition=definition)
+
+        self.assertEqual(apache_airflow_job_new.display_name, date_str)
+
+        apache_airflow_job_get = fcc.get_apache_airflow_job(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job_new.id)
+        self.assertEqual(apache_airflow_job_get.display_name, date_str)
+
+        apache_airflow_job = fcc.list_apache_airflow_jobs(workspace_id=workspace_id)
+        self.assertEqual(len(apache_airflow_job), 2)
+
+        date_str_updated = date_str + "_updated"
+        apache_airflow_job_updated = fcc.update_apache_airflow_job(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job_new.id, display_name=date_str_updated, return_item=True)
+        self.assertEqual(apache_airflow_job_updated.display_name, date_str_updated)
+
+        apache_airflow_job_updated = fcc.update_apache_airflow_job_definition(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job_new.id, definition=definition)
+        self.assertEqual(apache_airflow_job_updated.status_code, 200)
+
+        for apache_airflow_job in apache_airflow_job:
+            if apache_airflow_job.id != item_id:
+                resp = fcc.delete_apache_airflow_job(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job.id)
+                self.assertEqual(resp, 200)
+
+
+
+
+
+
+
msfabricpysdkcore/tests/test_digital_twin_builder.py
@@ -0,0 +1,60 @@
+import unittest
+from dotenv import load_dotenv
+from msfabricpysdkcore import FabricClientCore
+from datetime import datetime
+load_dotenv()
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        self.fcc = FabricClientCore()
+
+    def test_digital_twin_builder(self):
+        fcc = self.fcc
+
+        workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
+        item_id = "d7260274-8038-44ec-b096-dec1723931d1"
+
+        digital_twin_builder = fcc.list_digital_twin_builders(workspace_id=workspace_id)
+        for digital_twin_builder in digital_twin_builder:
+            if digital_twin_builder.id != item_id:
+                resp = fcc.delete_digital_twin_builder(workspace_id=workspace_id, digital_twin_builder_id=digital_twin_builder.id)
+                self.assertEqual(resp, 200)
+
+        digital_twin_builder_definition = fcc.get_digital_twin_builder_definition(workspace_id=workspace_id, digital_twin_builder_id=item_id)
+        self.assertIn("definition", digital_twin_builder_definition)
+        definition = digital_twin_builder_definition["definition"]
+
+        date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
+        date_str = f"copyjob{date_str}"
+
+        digital_twin_builder_new = fcc.create_digital_twin_builder(workspace_id=workspace_id, display_name=date_str, definition=definition)
+
+        self.assertEqual(digital_twin_builder_new.display_name, date_str)
+
+        digital_twin_builder_get = fcc.get_digital_twin_builder(workspace_id=workspace_id, digital_twin_builder_id=digital_twin_builder_new.id)
+        self.assertEqual(digital_twin_builder_get.display_name, date_str)
+
+        digital_twin_builder = fcc.list_digital_twin_builders(workspace_id=workspace_id)
+        self.assertEqual(len(digital_twin_builder), 2)
+
+        date_str_updated = date_str + "_updated"
+        digital_twin_builder_updated = fcc.update_digital_twin_builder(workspace_id=workspace_id, digital_twin_builder_id=digital_twin_builder_new.id, display_name=date_str_updated, return_item=True)
+        self.assertEqual(digital_twin_builder_updated.display_name, date_str_updated)
+
+        digital_twin_builder_updated = fcc.update_digital_twin_builder_definition(workspace_id=workspace_id, digital_twin_builder_id=digital_twin_builder_new.id, definition=definition)
+        self.assertIn(digital_twin_builder_updated.status_code, [200,202])
+
+        for digital_twin_builder in digital_twin_builder:
+            if digital_twin_builder.id != item_id:
+                resp = fcc.delete_digital_twin_builder(workspace_id=workspace_id, digital_twin_builder_id=digital_twin_builder.id)
+                self.assertEqual(resp, 200)
+
+
+
+
+
+
+
msfabricpysdkcore/tests/test_domains.py
@@ -16,21 +16,14 @@ class TestFabricClientCore(unittest.TestCase):
         fcc = FabricClientCore()
         fca = FabricClientAdmin()
 
-        ws = fcc.get_workspace_by_name("sdktestdomains")
+        ws = fcc.get_workspace_by_name("sdkswedencentral")
         cap = fcc.get_capacity(capacity_id=ws.capacity_id)
         principal = {'id': '1dc64c6e-7a10-4ea9-8488-85d0739a377d', 'type': 'User'}
 
         # Delete if exists
-        try:
-            domain = fca.get_domain_by_name("sdktestdomains")
-            domain.delete()
-        except:
-            pass
-        try:
-            domain = fca.get_domain_by_name("sdktestdomains2")
-            domain.delete()
-        except:
-            pass
+        for dom in fca.list_domains():
+            if "sdktestdomain" in dom.display_name:
+                dom.delete()
 
         # Create domain
         domain_name = "sdktestdomains" + datetime.now().strftime("%Y%m%d%H%M%S")
@@ -64,9 +57,9 @@ class TestFabricClientCore(unittest.TestCase):
         self.assertEqual(status_code, 200)
 
         # List domain workspaces
-        workspaces = fca.list_domain_workspaces(domain.id, workspace_objects=True)
+        workspaces = fca.list_domain_workspaces(domain.id, workspace_objects=False)
         self.assertGreater(len(workspaces), 0)
-        workspaces_ids = [w.id for w in workspaces]
+        workspaces_ids = [w["id"] for w in workspaces]
         self.assertIn(ws.id, workspaces_ids)
 
         # Unassign domain workspaces by ids
@@ -80,9 +73,9 @@ class TestFabricClientCore(unittest.TestCase):
         status_code = fca.assign_domain_workspaces_by_capacities(domain.id, [cap.id])
         self.assertEqual(status_code, 202)
 
-        workspaces = fca.list_domain_workspaces(domain.id, workspace_objects=True)
+        workspaces = fca.list_domain_workspaces(domain.id, workspace_objects=False)
         self.assertGreater(len(workspaces), 0)
-        workspaces_ids = [w.id for w in workspaces]
+        workspaces_ids = [w["id"] for w in workspaces]
         self.assertIn(ws.id, workspaces_ids)
 
         # Unassign all domain workspaces
@@ -93,13 +86,13 @@ class TestFabricClientCore(unittest.TestCase):
         self.assertEqual(len(workspaces), 0)
 
         # Assign domain workspaces by principals
-        status_code = fca.assign_domains_workspaces_by_principals(domain.id, [principal], wait_for_completion=True)
+        status_code = fca.assign_domains_workspaces_by_principals(domain.id, [principal], wait_for_completion=False)
 
         self.assertEqual(status_code, 202)
 
-        workspaces = fca.list_domain_workspaces(domain.id, workspace_objects=True)
+        workspaces = fca.list_domain_workspaces(domain.id, workspace_objects=False)
         self.assertGreater(len(workspaces), 0)
-        workspaces_ids = [w.id for w in workspaces]
+        workspaces_ids = [w["id"] for w in workspaces]
         self.assertIn(ws.id, workspaces_ids)
 
         # Role assignments bulk assign
msfabricpysdkcore/tests/test_environments.py
@@ -13,36 +13,45 @@ class TestFabricClientCore(unittest.TestCase):
         self.fc = FabricClientCore()
 
     def test_environments_crudl(self):
-        fc = self.fc
-        workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
-        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+        fcc = self.fc
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
+        item_id = '35a08bcb-ff8c-40f7-93ea-b86dc1affce5'
+
+        environments = fcc.list_environments(workspace_id=workspace_id)
+        for environment in environments:
+            if environment.id != item_id:
+                resp = fcc.delete_environment(workspace_id=workspace_id, environment_id=environment.id)
+                self.assertEqual(resp, 200)
+
 
-        env_name = "env" + datetime_str
-        environment1 = fc.create_environment(workspace_id, display_name=env_name)
-        self.assertEqual(environment1.display_name, env_name)
+        date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
+        date_str = f"environment{date_str}"
 
-        environments = fc.list_environments(workspace_id)
-        environment_names = [env.display_name for env in environments]
-        self.assertGreater(len(environments), 0)
-        self.assertIn(env_name, environment_names)
+        environment_new = fcc.create_environment(workspace_id=workspace_id, display_name=date_str)
 
-        env = fc.get_environment(workspace_id, environment_name=env_name)
-        self.assertIsNotNone(env.id)
-        self.assertEqual(env.display_name, env_name)
-        new_name = env_name + "2"
-        env2 = fc.update_environment(workspace_id, env.id, display_name=new_name, return_item=True)
+        self.assertEqual(environment_new.display_name, date_str)
 
-        env = fc.get_environment(workspace_id, environment_id=env.id)
-        self.assertEqual(env.display_name, new_name)
-        self.assertEqual(env.id, env2.id)
+        environment_get = fcc.get_environment(workspace_id=workspace_id, environment_id=environment_new.id)
+        self.assertEqual(environment_get.display_name, date_str)
 
-        status_code = fc.delete_environment(workspace_id, env.id)
-        self.assertEqual(status_code, 200)
+        environments = fcc.list_environments(workspace_id=workspace_id)
+        self.assertEqual(len(environments), 2)
+
+        date_str_updated = date_str + "_updated"
+        environment_updated = fcc.update_environment(workspace_id=workspace_id, environment_id=environment_new.id, display_name=date_str_updated, return_item=True)
+        self.assertEqual(environment_updated.display_name, date_str_updated)
+
+        for environment in environments:
+            if environment.id != item_id:
+                resp = fcc.delete_environment(workspace_id=workspace_id, environment_id=environment.id)
+                self.assertEqual(resp, 200)
 
     def test_environment_details(self):
         fc = FabricClientCore()
-        workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
-        environment_id = '5648be67-28fa-48b6-9d1f-3c87c3704d3c'
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
+        environment_id = '35a08bcb-ff8c-40f7-93ea-b86dc1affce5'
+
         published_settings = fc.get_published_settings(workspace_id=workspace_id, environment_id=environment_id)
         self.assertIsNotNone(published_settings)
         self.assertIn("instancePool", published_settings)
@@ -67,13 +76,13 @@ class TestFabricClientCore(unittest.TestCase):
 
     def test_environment_spark_libraries(self):
         fc = self.fc
-        workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
-        environment_id = '5648be67-28fa-48b6-9d1f-3c87c3704d3c'
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
+        environment_id = '35a08bcb-ff8c-40f7-93ea-b86dc1affce5'
 
         resp = fc.get_published_libraries(workspace_id, environment_id)
         self.assertIn('customLibraries', resp)
         self.assertIn('wheelFiles', resp['customLibraries'])
-        self.assertIn('msfabricpysdkcore-0.1.1-py3-none-any.whl', resp['customLibraries']['wheelFiles'])
+        self.assertIn('msfabricpysdkcore-0.2.6-py3-none-any.whl', resp['customLibraries']['wheelFiles'])
 
         resp = fc.upload_staging_library(workspace_id, environment_id, 'dummy.whl')
         self.assertEqual(resp.status_code, 200)
@@ -83,7 +92,6 @@ class TestFabricClientCore(unittest.TestCase):
         self.assertIn('customLibraries', resp)
         self.assertIn('wheelFiles', resp['customLibraries'])
         self.assertIn('dummy.whl', resp['customLibraries']['wheelFiles'])
-        self.assertIn('staging.whl', resp['customLibraries']['wheelFiles'])
 
 
         resp = fc.publish_environment(workspace_id, environment_id)
@@ -105,7 +113,6 @@ class TestFabricClientCore(unittest.TestCase):
         self.assertIn('customLibraries', resp)
         self.assertIn('wheelFiles', resp['customLibraries'])
         self.assertNotIn('dummy.whl', resp['customLibraries']['wheelFiles'])
-        self.assertIn('staging.whl', resp['customLibraries']['wheelFiles'])
 
 
 
msfabricpysdkcore/tests/test_evenstreams.py
@@ -15,38 +15,43 @@ class TestFabricClientCore(unittest.TestCase):
 
     def test_eventstreams(self):
 
-        fc = self.fc
-        workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
+        fcc = self.fc
 
-        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        es_name = "es" + datetime_str
+        workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
+        item_id = "94f4b54b-980b-43f9-8b67-ded8028cf1b9"
 
-        eventstream = fc.create_eventstream(workspace_id, display_name=es_name)
-        self.assertEqual(eventstream.display_name, es_name)
+        eventstreams = fcc.list_eventstreams(workspace_id=workspace_id)
+        for eventstream in eventstreams:
+            if eventstream.id != item_id:
+                resp = fcc.delete_eventstream(workspace_id=workspace_id, eventstream_id=eventstream.id)
+                self.assertEqual(resp, 200)
 
-        eventstreams = fc.list_eventstreams(workspace_id)
-        eventstream_names = [es.display_name for es in eventstreams]
-        self.assertGreater(len(eventstreams), 0)
-        self.assertIn(es_name, eventstream_names)
+        eventstream_definition = fcc.get_eventstream_definition(workspace_id=workspace_id, eventstream_id=item_id)
+        self.assertIn("definition", eventstream_definition)
+        definition = eventstream_definition["definition"]
 
-
-        es = fc.get_eventstream(workspace_id, eventstream_name=es_name)
-        self.assertIsNotNone(es.id)
-        self.assertEqual(es.display_name, es_name)
+        date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
+        date_str = f"eventstream{date_str}"
 
-        es2 = fc.update_eventstream(workspace_id, es.id, display_name=f"{es_name}2", return_item=True)
+        eventstream_new = fcc.create_eventstream(workspace_id=workspace_id, display_name=date_str, definition=definition)
 
-        es = fc.get_eventstream(workspace_id, eventstream_id=es.id)
-        self.assertEqual(es.display_name, f"{es_name}2")
-        self.assertEqual(es.id, es2.id)
+        self.assertEqual(eventstream_new.display_name, date_str)
 
-        response = fc.update_eventstream_definition(workspace_id, eventstream_id=es.id, definition=es.definition)
-        self.assertIn(response.status_code, [200, 202])
+        eventstream_get = fcc.get_eventstream(workspace_id=workspace_id, eventstream_id=eventstream_new.id)
+        self.assertEqual(eventstream_get.display_name, date_str)
 
-        definition = fc.get_eventstream_definition(workspace_id, eventstream_id=es.id)
-        self.assertIn("definition", definition)
-        self.assertIn("parts", definition["definition"])
-        self.assertGreaterEqual(len(definition["definition"]["parts"]), 3)
+        eventstreams = fcc.list_eventstreams(workspace_id=workspace_id)
+        self.assertEqual(len(eventstreams), 2)
 
-        status_code = fc.delete_eventstream(workspace_id, es.id)
-        self.assertEqual(status_code, 200)
+        date_str_updated = date_str + "_updated"
+        eventstream_updated = fcc.update_eventstream(workspace_id=workspace_id, eventstream_id=eventstream_new.id, display_name=date_str_updated, return_item=True)
+        self.assertEqual(eventstream_updated.display_name, date_str_updated)
+
+        eventstream_updated = fcc.update_eventstream_definition(workspace_id=workspace_id, eventstream_id=eventstream_new.id, definition=definition)
+        self.assertEqual(eventstream_updated.status_code, 200)
+
+        for eventstream in eventstreams:
+            if eventstream.id != item_id:
+                resp = fcc.delete_eventstream(workspace_id=workspace_id, eventstream_id=eventstream.id)
+                self.assertEqual(resp, 200)
msfabricpysdkcore/tests/test_eventhouses.py
@@ -0,0 +1,63 @@
+import unittest
+from datetime import datetime
+from dotenv import load_dotenv
+from time import sleep
+from msfabricpysdkcore.coreapi import FabricClientCore
+
+load_dotenv()
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        #load_dotenv()
+        self.fc = FabricClientCore()
+
+        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+        self.item_name = "testitem" + datetime_str
+        self.item_type = "Notebook"
+
+
+
+    def test_eventhouses(self):
+
+        fcc = self.fc
+
+        workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
+        item_id = "5d544ac1-c58d-4d3a-a032-f57dc4b5c2a7"
+
+        eventhouses = fcc.list_eventhouses(workspace_id=workspace_id)
+        for eventhouse in eventhouses:
+            if eventhouse.id != item_id:
+                resp = fcc.delete_eventhouse(workspace_id=workspace_id, eventhouse_id=eventhouse.id)
+                self.assertEqual(resp, 200)
+
+        eventhouse_definition = fcc.get_eventhouse_definition(workspace_id=workspace_id, eventhouse_id=item_id)
+        self.assertIn("definition", eventhouse_definition)
+        definition = eventhouse_definition["definition"]
+
+        date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
+        date_str = f"eventhouse{date_str}"
+
+        eventhouse_new = fcc.create_eventhouse(workspace_id=workspace_id, display_name=date_str, definition=definition)
+
+        self.assertEqual(eventhouse_new.display_name, date_str)
+
+        eventhouse_get = fcc.get_eventhouse(workspace_id=workspace_id, eventhouse_id=eventhouse_new.id)
+        self.assertEqual(eventhouse_get.display_name, date_str)
+
+        eventhouses = fcc.list_eventhouses(workspace_id=workspace_id)
+        self.assertEqual(len(eventhouses), 2)
+
+        date_str_updated = date_str + "_updated"
+        eventhouse_updated = fcc.update_eventhouse(workspace_id=workspace_id, eventhouse_id=eventhouse_new.id, display_name=date_str_updated, return_item=True)
+        self.assertEqual(eventhouse_updated.display_name, date_str_updated)
+
+        eventhouse_updated = fcc.update_eventhouse_definition(workspace_id=workspace_id, eventhouse_id=eventhouse_new.id, definition=definition)
+        self.assertIn(eventhouse_updated.status_code, [200, 202])
+
+        for eventhouse in eventhouses:
+            if eventhouse.id != item_id:
+                resp = fcc.delete_eventhouse(workspace_id=workspace_id, eventhouse_id=eventhouse.id)
+                self.assertEqual(resp, 200)
msfabricpysdkcore/tests/test_external_data_shares.py
@@ -14,8 +14,8 @@ class TestFabricClientCore(unittest.TestCase):
 
         fc = self.fc
 
-        workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
-        item_id = "148ef579-4a5d-4048-8a48-0a703c5e3a1a"
+        workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
+        item_id = "82c01e0c-4cee-4a62-9806-870699ced699"
 
         recipient = {
             "userPrincipalName": "lisa4@fabrikam.com"
msfabricpysdkcore/tests/test_fabric_azure_client.py
@@ -70,7 +70,7 @@ class TestFabricClientCore(unittest.TestCase):
         sleep(60)
         resp = fac.suspend_capacity(subscription_id, resource_group_name, capacity_name)
         self.assertEqual(resp.status_code, 202)
-        sleep(60)
+        sleep(180)
 
         if sku != "F4":
             resp = fac.update_capacity(subscription_id, resource_group_name, capacity_name, sku="F4")
msfabricpysdkcore/tests/test_folders.py
@@ -1,6 +1,7 @@
 import unittest
 from dotenv import load_dotenv
 from msfabricpysdkcore import FabricClientCore
+from datetime import datetime
 
 load_dotenv()
 
@@ -15,23 +16,25 @@ class TestFabricClientCore(unittest.TestCase):
 
         workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
         folder_id = "d4f3a9fb-6975-4f5c-9c6b-ca205280966f"
+        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+        display_name = f"sdk_folder_{datetime_str}"
 
-        folder = fcc.create_folder(workspace_id=workspace_id, display_name="sdk_sub_folder", parent_folder_id=folder_id)
+        folder = fcc.create_folder(workspace_id=workspace_id, display_name=display_name, parent_folder_id=folder_id)
         self.assertIsNotNone(folder)
-        self.assertEqual(folder.display_name, "sdk_sub_folder")
+        self.assertEqual(folder.display_name, display_name)
 
         folder_ = fcc.get_folder(workspace_id=workspace_id, folder_id=folder.id)
         self.assertEqual(folder.id, folder_.id)
 
         folders = fcc.list_folders(workspace_id=workspace_id)
-        folders = [folder for folder in folders if folder.display_name == "sdk_sub_folder"]
+        folders = [folder for folder in folders if folder.display_name == display_name]
         self.assertGreater(len(folders), 0)
 
-        folder = fcc.update_folder(workspace_id=workspace_id, folder_id=folder.id, display_name="sdk_sub_folder_updated")
-        self.assertEqual(folder.display_name, "sdk_sub_folder_updated")
+        folder = fcc.update_folder(workspace_id=workspace_id, folder_id=folder.id, display_name=f"sdk_sub_folder_updated_{datetime_str}")
+        self.assertEqual(folder.display_name, f"sdk_sub_folder_updated_{datetime_str}")
 
         folder = fcc.move_folder(workspace_id=workspace_id, folder_id=folder.id)
-        self.assertEqual(folder.display_name, "sdk_sub_folder_updated")
+        self.assertEqual(folder.display_name, f"sdk_sub_folder_updated_{datetime_str}")
         self.assertEqual(folder.parent_folder_id, "")
 
         folders = fcc.list_folders(workspace_id=workspace_id)
msfabricpysdkcore/tests/test_gateways.py
@@ -28,9 +28,9 @@ class TestFabricClientCore(unittest.TestCase):
         datetime_str = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
         display_name = 'fabricvnet-' + datetime_str
         gwr = {'displayName': display_name,
-               'capacityId': '339c785f-4489-46ae-a649-e049e7610479',
+               'capacityId': '9e7e757d-d567-4fb3-bc4f-d230aabf2a00',
               'virtualNetworkAzureResource': {'virtualNetworkName': 'fabricvnet',
-                   'subnetName': 'default3',
+                   'subnetName': 'default2',
                    'resourceGroupName': 'fabricdemo',
                    'subscriptionId': 'c77cc8fc-43bb-4d44-bdc5-6e20511ed2a8'},
               'inactivityMinutesBeforeSleep': 30,
@@ -54,7 +54,7 @@ class TestFabricClientCore(unittest.TestCase):
 
         new_ras = fc.add_gateway_role_assignment(gw['id'], principal, 'ConnectionCreator')
         self.assertIn("id", new_ras)
-        self.assertEqual(2, len(fc.list_gateway_role_assignments(gw['id'])))
+        #self.assertEqual(2, len(fc.list_gateway_role_assignments(gw['id'])))
 
         new_ras = fc.update_gateway_role_assignment(gw['id'], new_ras['id'], 'Admin')
         self.assertEqual('Admin', new_ras['role'])
msfabricpysdkcore/tests/test_git.py
@@ -18,7 +18,7 @@ class TestFabricClientCore(unittest.TestCase):
         ws2_name = "git" + datetime_str
         self.fc.create_workspace(display_name=ws2_name)
         ws2 = self.fc.get_workspace_by_name(name=ws2_name)
-        self.fc.assign_to_capacity(workspace_id=ws2.id, capacity_id="840a6c1e-5289-4094-bbc8-716daabaeeba")
+        self.fc.assign_to_capacity(workspace_id=ws2.id, capacity_id="9e7e757d-d567-4fb3-bc4f-d230aabf2a00")
 
         git_provider_details = {'organizationName': 'MngEnvMCAP065039',
             'projectName': 'fabricdevops',
msfabricpysdkcore/tests/test_items.py
@@ -12,7 +12,7 @@ class TestFabricClientCore(unittest.TestCase):
         super(TestFabricClientCore, self).__init__(*args, **kwargs)
         #load_dotenv()
         self.fc = FabricClientCore()
-        self.workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
+        self.workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
 
 
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
@@ -90,7 +90,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_item_connections(self):
 
         fc = self.fc
-        connections = fc.list_item_connections(workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3', item_id = '1bcc8b76-8e1f-428d-a594-f91ce1b9b076')
+        connections = fc.list_item_connections(workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3', item_id = '82c01e0c-4cee-4a62-9806-870699ced699')
         self.assertEqual(len(connections), 0)
 
 if __name__ == "__main__":