msfabricpysdkcore 0.2.5__py3-none-any.whl → 0.2.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. msfabricpysdkcore/coreapi.py +742 -32
  2. msfabricpysdkcore/otheritems.py +61 -1
  3. msfabricpysdkcore/workspace.py +161 -9
  4. {msfabricpysdkcore-0.2.5.dist-info → msfabricpysdkcore-0.2.7.dist-info}/METADATA +4 -2
  5. msfabricpysdkcore-0.2.7.dist-info/RECORD +30 -0
  6. msfabricpysdkcore/tests/__init__.py +0 -0
  7. msfabricpysdkcore/tests/test_admin_apis.py +0 -174
  8. msfabricpysdkcore/tests/test_admin_tags.py +0 -46
  9. msfabricpysdkcore/tests/test_connection.py +0 -111
  10. msfabricpysdkcore/tests/test_copy_jobs.py +0 -60
  11. msfabricpysdkcore/tests/test_dataflows.py +0 -60
  12. msfabricpysdkcore/tests/test_datapipelines.py +0 -60
  13. msfabricpysdkcore/tests/test_deployment_pipeline.py +0 -63
  14. msfabricpysdkcore/tests/test_deployment_pipelinev2.py +0 -135
  15. msfabricpysdkcore/tests/test_domains.py +0 -126
  16. msfabricpysdkcore/tests/test_environments.py +0 -114
  17. msfabricpysdkcore/tests/test_evenhouses.py +0 -56
  18. msfabricpysdkcore/tests/test_evenstreams.py +0 -52
  19. msfabricpysdkcore/tests/test_eventstream_topology.py +0 -82
  20. msfabricpysdkcore/tests/test_external_data_shares.py +0 -51
  21. msfabricpysdkcore/tests/test_fabric_azure_client.py +0 -80
  22. msfabricpysdkcore/tests/test_folders.py +0 -53
  23. msfabricpysdkcore/tests/test_gateways.py +0 -99
  24. msfabricpysdkcore/tests/test_git.py +0 -66
  25. msfabricpysdkcore/tests/test_graphqlapi.py +0 -44
  26. msfabricpysdkcore/tests/test_items.py +0 -97
  27. msfabricpysdkcore/tests/test_jobs.py +0 -96
  28. msfabricpysdkcore/tests/test_kql_dashboards.py +0 -63
  29. msfabricpysdkcore/tests/test_kql_queryset.py +0 -64
  30. msfabricpysdkcore/tests/test_kqldatabases.py +0 -56
  31. msfabricpysdkcore/tests/test_lakehouse.py +0 -93
  32. msfabricpysdkcore/tests/test_managed_private_endpoints.py +0 -61
  33. msfabricpysdkcore/tests/test_mirroreddatabases.py +0 -80
  34. msfabricpysdkcore/tests/test_ml_experiments.py +0 -47
  35. msfabricpysdkcore/tests/test_ml_models.py +0 -47
  36. msfabricpysdkcore/tests/test_mounted_adf.py +0 -64
  37. msfabricpysdkcore/tests/test_notebooks.py +0 -57
  38. msfabricpysdkcore/tests/test_one_lake_data_access_security.py +0 -63
  39. msfabricpysdkcore/tests/test_other_items.py +0 -45
  40. msfabricpysdkcore/tests/test_reflex.py +0 -57
  41. msfabricpysdkcore/tests/test_reports.py +0 -56
  42. msfabricpysdkcore/tests/test_semantic_model.py +0 -56
  43. msfabricpysdkcore/tests/test_shortcuts.py +0 -60
  44. msfabricpysdkcore/tests/test_spark.py +0 -91
  45. msfabricpysdkcore/tests/test_sparkjobdefinition.py +0 -55
  46. msfabricpysdkcore/tests/test_sqldatabases.py +0 -45
  47. msfabricpysdkcore/tests/test_tags.py +0 -28
  48. msfabricpysdkcore/tests/test_variable_libary.py +0 -61
  49. msfabricpysdkcore/tests/test_warehouses.py +0 -50
  50. msfabricpysdkcore/tests/test_workspaces_capacities.py +0 -159
  51. msfabricpysdkcore-0.2.5.dist-info/RECORD +0 -75
  52. {msfabricpysdkcore-0.2.5.dist-info → msfabricpysdkcore-0.2.7.dist-info}/WHEEL +0 -0
  53. {msfabricpysdkcore-0.2.5.dist-info → msfabricpysdkcore-0.2.7.dist-info}/licenses/LICENSE +0 -0
  54. {msfabricpysdkcore-0.2.5.dist-info → msfabricpysdkcore-0.2.7.dist-info}/top_level.txt +0 -0
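The deleted files below are the SDK's in-wheel integration tests; in 0.2.5 they doubled as usage examples for the client classes. Every one of them bootstraps a client the same way. A minimal sketch of that setup, assuming authentication is configured through environment variables (for example via a .env file read by python-dotenv), exactly as the removed tests do:

```python
# Client bootstrap mirroring the removed tests (a sketch, not official docs).
# Assumes the required auth environment variables are available, e.g. loaded
# from a .env file by python-dotenv.
from dotenv import load_dotenv
from msfabricpysdkcore import FabricClientCore, FabricClientAdmin

load_dotenv()              # pull credentials/config from .env, as the tests did
fcc = FabricClientCore()   # workspace- and item-level APIs
fca = FabricClientAdmin()  # tenant/admin APIs (used by the domains test)
```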
msfabricpysdkcore/tests/test_connection.py (deleted)
@@ -1,111 +0,0 @@
- import unittest
- from datetime import datetime
- from dotenv import load_dotenv
- from time import sleep
- from msfabricpysdkcore.coreapi import FabricClientCore
-
- load_dotenv()
-
- class TestFabricClientCore(unittest.TestCase):
-
- def __init__(self, *args, **kwargs):
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
- #load_dotenv()
- self.fc = FabricClientCore()
-
- def test_connection(self):
-
- datetime_str = datetime.now().strftime("%Y%m%H%M%S")
- datetime_str
- fc = self.fc
-
- # display_name = "ContosoCloudConnection" + datetime_str
-
- # cr = {"connectivityType": "ShareableCloud",
- # "displayName": display_name,
- # "connectionDetails": {
- # 'type': "SQL",
- # 'creationMethod': 'SQL',
- # "parameters": [
- # {
- # "dataType": "Text",
- # "name": "server",
- # "value": "dfsdemo.database.windows.net"
- # },
- # {
- # "dataType": "Text",
- # "name": "database",
- # "value": "dfsdemo"
- # }
- # ]},
- # 'privacyLevel': 'Organizational',
- # 'credentialDetails': {'credentials':{'credentialType': 'Basic',
- # 'userName': 'new_user',
- # 'password': 'StrongPassword123!'},
- # 'singleSignOnType': 'None',
- # 'connectionEncryption': 'NotEncrypted',
- # 'skipTestConnection': False}
- # }
-
-
-
- # connection = fc.create_connection(connection_request=cr)
- # self.assertIsNotNone(connection)
- # self.assertIn('id', connection)
- # self.assertIn('displayName', connection)
- # self.assertEqual(connection['displayName'], display_name)
-
- # connection2 = fc.get_connection(connection_name=display_name)
- # self.assertEqual(connection['id'], connection2['id'])
-
-
- # connections = fc.list_connections()
- # connection_names = [conn['displayName'] for conn in connections]
- # self.assertIn(display_name, connection_names)
-
- # id = connection['id']
-
- # role_assis = fc.list_connection_role_assignments(connection_id=id)
- # self.assertEqual(len(role_assis), 1)
-
- # principal = {"id" : "755f273c-98f8-408c-a886-691794938bd8",
- # "type" : "ServicePrincipal"}
-
- # add_role_assi = fc.add_connection_role_assignment(connection_id=id, principal=principal, role='User')
- # self.assertIsNotNone(add_role_assi)
- # self.assertIn('id', add_role_assi)
- # role_assi_id = add_role_assi['id']
-
- # role_assis = fc.list_connection_role_assignments(connection_id=id)
- # self.assertEqual(len(role_assis), 2)
-
- # role_assi = fc.get_connection_role_assignment(connection_id=id,
- # connection_role_assignment_id=role_assi_id)
- # self.assertEqual(role_assi['id'], role_assi_id)
-
- # role_assi = fc.update_connection_role_assignment(connection_id=id,
- # connection_role_assignment_id=role_assi_id,
- # role='UserWithReshare')
- # self.assertEqual(role_assi['role'], 'UserWithReshare')
-
- # status_code = fc.delete_connection_role_assignment(connection_id=id,
- # connection_role_assignment_id=role_assi_id)
- # self.assertEqual(status_code, 200)
-
-
- # cr = {
- # "connectivityType": "ShareableCloud",
- # "displayName": f"sqlserver{datetime_str}"
- # }
-
- # updated_connection = fc.update_connection(connection_id=id, connection_request=cr)
- # self.assertIsNotNone(updated_connection)
-
-
- # connection2 = fc.get_connection(connection_id=id)
- # self.assertEqual(connection['id'], connection2['id'])
- # self.assertEqual(connection2['displayName'], f"sqlserver{datetime_str}")
-
- # status_code = fc.delete_connection(connection_id=id)
- # self.assertEqual(status_code, 200)
-
msfabricpysdkcore/tests/test_copy_jobs.py (deleted)
@@ -1,60 +0,0 @@
- import unittest
- from dotenv import load_dotenv
- from msfabricpysdkcore import FabricClientCore
- from datetime import datetime
- load_dotenv()
-
- class TestFabricClientCore(unittest.TestCase):
-
- def __init__(self, *args, **kwargs):
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
- self.fcc = FabricClientCore()
-
- def test_copy_jobs(self):
- fcc = self.fcc
-
- workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
- item_id = "a9e59ec1-524b-49b1-a185-37e47dc0ceb9"
-
- copy_jobs = fcc.list_copy_jobs(workspace_id=workspace_id)
- for copy_job in copy_jobs:
- if copy_job.id != item_id:
- resp = fcc.delete_copy_job(workspace_id=workspace_id, copy_job_id=copy_job.id)
- self.assertEqual(resp, 200)
-
- copy_job_definition = fcc.get_copy_job_definition(workspace_id=workspace_id, copy_job_id=item_id)
- self.assertIn("definition", copy_job_definition)
- definition = copy_job_definition["definition"]
-
- date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
- date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
- date_str = f"copyjob{date_str}"
-
- copy_job_new = fcc.create_copy_job(workspace_id=workspace_id, display_name=date_str, definition=definition)
-
- self.assertEqual(copy_job_new.display_name, date_str)
-
- copy_job_get = fcc.get_copy_job(workspace_id=workspace_id, copy_job_id=copy_job_new.id)
- self.assertEqual(copy_job_get.display_name, date_str)
-
- copy_jobs = fcc.list_copy_jobs(workspace_id=workspace_id)
- self.assertEqual(len(copy_jobs), 2)
-
- date_str_updated = date_str + "_updated"
- copy_job_updated = fcc.update_copy_job(workspace_id=workspace_id, copy_job_id=copy_job_new.id, display_name=date_str_updated, return_item=True)
- self.assertEqual(copy_job_updated.display_name, date_str_updated)
-
- copy_job_updated = fcc.update_copy_job_definition(workspace_id=workspace_id, copy_job_id=copy_job_new.id, definition=definition)
- self.assertEqual(copy_job_updated.status_code, 200)
-
- for copy_job in copy_jobs:
- if copy_job.id != item_id:
- resp = fcc.delete_copy_job(workspace_id=workspace_id, copy_job_id=copy_job.id)
- self.assertEqual(resp, 200)
-
-
-
-
-
-
-
msfabricpysdkcore/tests/test_dataflows.py (deleted)
@@ -1,60 +0,0 @@
- import unittest
- from dotenv import load_dotenv
- from msfabricpysdkcore import FabricClientCore
- from datetime import datetime
- load_dotenv()
-
- class TestFabricClientCore(unittest.TestCase):
-
- def __init__(self, *args, **kwargs):
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
- self.fcc = FabricClientCore()
-
- def test_dataflows(self):
- fcc = self.fcc
-
- workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
- item_id = "8bc6f2f1-2ef9-4dc1-ab47-f55aa90e4088"
-
- dataflows = fcc.list_dataflows(workspace_id=workspace_id)
- for dataflow in dataflows:
- if dataflow.id != item_id:
- resp = fcc.delete_dataflow(workspace_id=workspace_id, dataflow_id=dataflow.id)
- self.assertEqual(resp, 200)
-
- dataflow_definition = fcc.get_dataflow_definition(workspace_id=workspace_id, dataflow_id=item_id)
- self.assertIn("definition", dataflow_definition)
- definition = dataflow_definition["definition"]
-
- date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
- date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
- date_str = f"dataflow{date_str}"
-
- dataflow_new = fcc.create_dataflow(workspace_id=workspace_id, display_name=date_str, definition=definition)
-
- self.assertEqual(dataflow_new.display_name, date_str)
-
- dataflow_get = fcc.get_dataflow(workspace_id=workspace_id, dataflow_id=dataflow_new.id)
- self.assertEqual(dataflow_get.display_name, date_str)
-
- dataflows = fcc.list_dataflows(workspace_id=workspace_id)
- self.assertEqual(len(dataflows), 2)
-
- date_str_updated = date_str + "_updated"
- dataflow_updated = fcc.update_dataflow(workspace_id=workspace_id, dataflow_id=dataflow_new.id, display_name=date_str_updated, return_item=True)
- self.assertEqual(dataflow_updated.display_name, date_str_updated)
-
- dataflow_updated = fcc.update_dataflow_definition(workspace_id=workspace_id, dataflow_id=dataflow_new.id, definition=definition)
- self.assertEqual(dataflow_updated.status_code, 200)
-
- for dataflow in dataflows:
- if dataflow.id != item_id:
- resp = fcc.delete_dataflow(workspace_id=workspace_id, dataflow_id=dataflow.id)
- self.assertEqual(resp, 200)
-
-
-
-
-
-
-
msfabricpysdkcore/tests/test_datapipelines.py (deleted)
@@ -1,60 +0,0 @@
- import unittest
- from dotenv import load_dotenv
- from msfabricpysdkcore import FabricClientCore
- from datetime import datetime
- load_dotenv()
-
- class TestFabricClientCore(unittest.TestCase):
-
- def __init__(self, *args, **kwargs):
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
- self.fcc = FabricClientCore()
-
- def test_data_pipelines(self):
- fcc = self.fcc
-
- workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
- item_id = "b7746e38-5409-487a-969c-fb7cb026b5d3"
-
- data_pipelines = fcc.list_data_pipelines(workspace_id=workspace_id)
- for data_pipeline in data_pipelines:
- if data_pipeline.id != item_id:
- resp = fcc.delete_data_pipeline(workspace_id=workspace_id, data_pipeline_id=data_pipeline.id)
- self.assertEqual(resp, 200)
-
- data_pipeline_definition = fcc.get_data_pipeline_definition(workspace_id=workspace_id, data_pipeline_id=item_id)
- self.assertIn("definition", data_pipeline_definition)
- definition = data_pipeline_definition["definition"]
-
- date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
- date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
- date_str = f"data_pipeline{date_str}"
-
- data_pipeline_new = fcc.create_data_pipeline(workspace_id=workspace_id, display_name=date_str, definition=definition)
-
- self.assertEqual(data_pipeline_new.display_name, date_str)
-
- data_pipeline_get = fcc.get_data_pipeline(workspace_id=workspace_id, data_pipeline_id=data_pipeline_new.id)
- self.assertEqual(data_pipeline_get.display_name, date_str)
-
- data_pipelines = fcc.list_data_pipelines(workspace_id=workspace_id)
- self.assertEqual(len(data_pipelines), 2)
-
- date_str_updated = date_str + "_updated"
- data_pipeline_updated = fcc.update_data_pipeline(workspace_id=workspace_id, data_pipeline_id=data_pipeline_new.id, display_name=date_str_updated, return_item=True)
- self.assertEqual(data_pipeline_updated.display_name, date_str_updated)
-
- data_pipeline_updated = fcc.update_data_pipeline_definition(workspace_id=workspace_id, data_pipeline_id=data_pipeline_new.id, definition=definition)
- self.assertEqual(data_pipeline_updated.status_code, 200)
-
- for data_pipeline in data_pipelines:
- if data_pipeline.id != item_id:
- resp = fcc.delete_data_pipeline(workspace_id=workspace_id, data_pipeline_id=data_pipeline.id)
- self.assertEqual(resp, 200)
-
-
-
-
-
-
-
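The three hunks above (copy jobs, dataflows, data pipelines) exercise the same item lifecycle; only the method names differ. Condensed into a sketch for copy jobs, with placeholder IDs instead of the test workspace GUIDs; the dataflow and data pipeline variants swap in their respective `*_dataflow*` / `*_data_pipeline*` methods:

```python
# Item lifecycle as exercised by the removed CRUD tests (sketch; IDs are placeholders).
from dotenv import load_dotenv
from msfabricpysdkcore import FabricClientCore

load_dotenv()
fcc = FabricClientCore()

workspace_id = "<workspace-guid>"        # placeholder, not a value from the diff
template_id = "<existing-copy-job-id>"   # placeholder

# Reuse an existing item's definition as a template for a new item
definition = fcc.get_copy_job_definition(
    workspace_id=workspace_id, copy_job_id=template_id)["definition"]

new_job = fcc.create_copy_job(workspace_id=workspace_id,
                              display_name="copyjob_demo", definition=definition)
fcc.get_copy_job(workspace_id=workspace_id, copy_job_id=new_job.id)
fcc.update_copy_job(workspace_id=workspace_id, copy_job_id=new_job.id,
                    display_name="copyjob_demo_updated", return_item=True)
fcc.update_copy_job_definition(workspace_id=workspace_id,
                               copy_job_id=new_job.id, definition=definition)
fcc.delete_copy_job(workspace_id=workspace_id, copy_job_id=new_job.id)
```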
msfabricpysdkcore/tests/test_deployment_pipeline.py (deleted)
@@ -1,63 +0,0 @@
- import unittest
- from msfabricpysdkcore.coreapi import FabricClientCore
- from dotenv import load_dotenv
-
- load_dotenv()
-
-
- class TestFabricClientCore(unittest.TestCase):
-
- def __init__(self, *args, **kwargs):
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
- self.fc = FabricClientCore()
-
-
-
- def test_spark_workspace_custom_pools(self):
- fc = self.fc
-
- dep_pipes = fc.list_deployment_pipelines()
-
- self.assertGreater(len(dep_pipes), 0)
-
- self.assertIn("sdkpipe", [pipe.display_name for pipe in dep_pipes])
-
- for pipe in dep_pipes:
- if pipe.display_name == 'sdkpipe':
- pipe_id = pipe.id
- break
-
- pipe = fc.get_deployment_pipeline(pipe_id)
-
- self.assertEqual(pipe.display_name, 'sdkpipe')
- self.assertEqual(pipe.id, pipe_id)
-
- stages = fc.list_deployment_pipeline_stages(pipe_id)
-
- self.assertGreater(len(stages), 0)
- names = [stage.display_name for stage in stages]
- self.assertIn("Development", names)
- self.assertIn("Production", names)
-
- dev_stage = [stage for stage in stages if stage.display_name == "Development"][0]
- prod_stage = [stage for stage in stages if stage.display_name == "Production"][0]
-
- items = fc.list_deployment_pipeline_stages_items(deployment_pipeline_id=pipe_id, stage_id=dev_stage.id)
-
- self.assertGreater(len(items), 0)
- self.assertIn("cicdlakehouse", [item["itemDisplayName"] for item in items])
-
- items = [item for item in dev_stage.list_items() if item["itemDisplayName"] == 'cicdlakehouse']
- item = items[0]
- item = {"sourceItemId": item["itemId"],
- "itemType": item["itemType"]}
- items = [item]
-
- response = fc.deploy_stage_content(deployment_pipeline_id=pipe_id, source_stage_id=dev_stage.id,target_stage_id=prod_stage.id, items=items)
-
- self.assertEqual(response["status"], "Succeeded")
-
-
-
-
-
msfabricpysdkcore/tests/test_deployment_pipelinev2.py (deleted)
@@ -1,135 +0,0 @@
- import unittest
- from msfabricpysdkcore.coreapi import FabricClientCore
- from datetime import datetime
- from dotenv import load_dotenv
- import time
-
- load_dotenv()
-
- class TestFabricClientCore(unittest.TestCase):
-
- def __init__(self, *args, **kwargs):
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
- #load_dotenv()
- self.fc = FabricClientCore()
-
- def test_deployment_pipeline2(self):
- fcc = self.fc
- workspace_id = "72d9d955-bd1e-42c7-9746-208f7cbc8956"
-
- user_id = "e0505016-ef55-4ca7-b106-e085cc201823"
- capacity_id = "9e7e757d-d567-4fb3-bc4f-d230aabf2a00"
-
- prod_workspace = fcc.create_workspace("sdkswedenproddeploy")
- prod_workspace.assign_to_capacity(capacity_id)
-
- stages = [
- {
- "displayName": "Development",
- "description": "Development stage description",
- "isPublic": False
- },
- {
- "displayName": "Production",
- "description": "Production stage description",
- "isPublic":True
- }
- ]
-
- pipes = fcc.list_deployment_pipelines(with_details=False)
- for pipe in pipes:
- if "sdk" in pipe["displayName"]:
- fcc.delete_deployment_pipeline(deployment_pipeline_id=pipe["id"])
-
- pipe =fcc.create_deployment_pipeline(display_name="sdktestpipeline",
- description="Test Deployment Pipeline Description",
- stages=stages)
-
- self.assertIsNotNone(pipe.id)
- pipe_id = pipe.id
-
- for stage in pipe.stages:
- if stage["displayName"] == "Development":
- dev_stage = stage
- else:
- prod_stage = stage
-
- stage = fcc.get_deployment_pipeline_stage(deployment_pipeline_id=pipe_id,
- stage_id=dev_stage["id"])
- self.assertIsNotNone(stage.id)
- resp = fcc.assign_workspace_to_stage(deployment_pipeline_id=pipe_id,
- stage_id=dev_stage["id"],
- workspace_id=workspace_id)
- self.assertEqual(resp, 200)
-
- resp = fcc.assign_workspace_to_stage(deployment_pipeline_id=pipe_id,
- stage_id=prod_stage["id"],
- workspace_id=prod_workspace.id)
- self.assertEqual(resp, 200)
- principal = {
- "id": user_id,
- "type": "User"
- }
-
- resp = fcc.add_deployment_pipeline_role_assignment(deployment_pipeline_id=pipe_id,principal=principal, role="Admin")
- self.assertEqual(resp, 200)
-
- roles = fcc.list_deployment_pipeline_role_assignments(deployment_pipeline_id=pipe_id)
- self.assertTrue(len(roles) == 2)
-
- resp = fcc.delete_deployment_pipeline_role_assignment(deployment_pipeline_id=pipe_id, principal_id=user_id)
- self.assertEqual(resp, 200)
-
- roles = fcc.list_deployment_pipeline_role_assignments(deployment_pipeline_id=pipe_id)
- self.assertTrue(len(roles) == 1)
-
- pipes = fcc.list_deployment_pipelines(with_details=False)
- sdk_pipes = [pipe for pipe in pipes if "sdk" in pipe["displayName"]]
- self.assertTrue(len(sdk_pipes) > 0)
-
- resp = fcc.deploy_stage_content(deployment_pipeline_id=pipe_id,
- source_stage_id=dev_stage["id"],
- target_stage_id=prod_stage["id"], wait_for_completion=False)
- self.assertEqual(resp.status_code, 202)
-
- ops = fcc.list_deployment_pipeline_operations(deployment_pipeline_id=pipe_id)
- self.assertTrue(len(ops) > 0)
-
- ops = fcc.get_deployment_pipeline_operation(deployment_pipeline_id=pipe_id, operation_id=ops[0]["id"])
- self.assertIsNotNone(ops["id"])
-
- stages = fcc.list_deployment_pipeline_stages(deployment_pipeline_id=pipe_id)
- self.assertTrue(len(stages) == 2)
-
- items = fcc.list_deployment_pipeline_stage_items(deployment_pipeline_id=pipe_id, stage_id=dev_stage["id"])
- self.assertTrue(len(items) == 1)
-
- updated_pipe = fcc.update_deployment_pipeline(deployment_pipeline_id=pipe.id, display_name="sdknewname", description="newdescription")
- self.assertIsNotNone(updated_pipe.id)
-
- pipe = fcc.get_deployment_pipeline(pipe_id)
- self.assertIsNotNone(pipe.id)
- self.assertTrue(pipe.display_name == "sdknewname")
-
- updated_stage = fcc.update_deployment_pipeline_stage(deployment_pipeline_id=pipe_id, stage_id=prod_stage["id"],
- display_name="newname", description="newdescription")
- self.assertIsNotNone(updated_stage["id"])
-
- stage = fcc.get_deployment_pipeline_stage(deployment_pipeline_id=pipe_id, stage_id=prod_stage["id"])
- self.assertIsNotNone(stage.id)
- self.assertTrue(stage.display_name == "newname")
-
- for _ in range(10):
- ops = fcc.get_deployment_pipeline_operation(deployment_pipeline_id=pipe_id, operation_id=ops["id"])
- if ops["status"] != "Running":
- break
- else:
- time.sleep(5)
-
- resp = fcc.unassign_workspace_from_stage(deployment_pipeline_id=pipe_id,stage_id=prod_stage["id"])
- self.assertEqual(resp, 200)
-
- prod_workspace.delete()
-
- resp = fcc.delete_deployment_pipeline(deployment_pipeline_id=pipe_id)
- self.assertEqual(resp, 200)
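test_deployment_pipelinev2.py above also shows the non-blocking deployment path: `deploy_stage_content(..., wait_for_completion=False)` returns a 202 response, and completion is tracked by polling the pipeline operations. A sketch of that polling loop following the removed test; the client setup and GUIDs are placeholders:

```python
import time
from dotenv import load_dotenv
from msfabricpysdkcore import FabricClientCore

load_dotenv()
fcc = FabricClientCore()

pipe_id = "<deployment-pipeline-guid>"   # placeholders, not values from the diff
dev_stage_id = "<dev-stage-guid>"
prod_stage_id = "<prod-stage-guid>"

# Non-blocking deploy: the removed test asserts a 202 response here
resp = fcc.deploy_stage_content(deployment_pipeline_id=pipe_id,
                                source_stage_id=dev_stage_id,
                                target_stage_id=prod_stage_id,
                                wait_for_completion=False)

# Track completion by polling the pipeline's operations, as the test did
op = fcc.list_deployment_pipeline_operations(deployment_pipeline_id=pipe_id)[0]
for _ in range(10):                      # bounded wait, roughly 5 s between polls
    op = fcc.get_deployment_pipeline_operation(deployment_pipeline_id=pipe_id,
                                               operation_id=op["id"])
    if op["status"] != "Running":
        break
    time.sleep(5)
```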
msfabricpysdkcore/tests/test_domains.py (deleted)
@@ -1,126 +0,0 @@
- import unittest
- from dotenv import load_dotenv
- from datetime import datetime
- from msfabricpysdkcore import FabricClientCore, FabricClientAdmin
-
-
- load_dotenv()
-
- class TestFabricClientCore(unittest.TestCase):
-
- def __init__(self, *args, **kwargs):
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
-
-
- def test_domains(self):
- fcc = FabricClientCore()
- fca = FabricClientAdmin()
-
- ws = fcc.get_workspace_by_name("sdktestdomains")
- cap = fcc.get_capacity(capacity_id=ws.capacity_id)
- principal = {'id': '1dc64c6e-7a10-4ea9-8488-85d0739a377d', 'type': 'User'}
-
- # Delete if exists
- try:
- domain = fca.get_domain_by_name("sdktestdomains")
- domain.delete()
- except:
- pass
- try:
- domain = fca.get_domain_by_name("sdktestdomains2")
- domain.delete()
- except:
- pass
-
- # Create domain
- domain_name = "sdktestdomains" + datetime.now().strftime("%Y%m%d%H%M%S")
- domain = fca.create_domain(display_name=domain_name)
- self.assertIsNotNone(domain.id)
- self.assertEqual(domain.display_name, domain_name)
-
- # Get domain by name
- domain_clone = fca.get_domain_by_name(domain_name)
- self.assertIsNotNone(domain_clone.id)
- self.assertEqual(domain_clone.display_name, domain_name)
-
- # Get domain by id
- domain_clone = fca.get_domain_by_id(domain.id)
- self.assertIsNotNone(domain_clone.id)
- self.assertEqual(domain_clone.display_name, domain_name)
-
- # List domains
- domains = fca.list_domains()
- self.assertGreater(len(domains), 0)
- domains_ids = [d.id for d in domains]
- self.assertIn(domain.id, domains_ids)
-
- # Update domain
- domain_new_name = f"{domain_name}2"
- domain_clone = fca.update_domain(domain.id, display_name=domain_new_name, return_item=True)
- self.assertEqual(domain_clone.display_name, domain_new_name)
-
- # Assign domain workspaces by Ids
- status_code = fca.assign_domain_workspaces_by_ids(domain.id, [ws.id])
- self.assertEqual(status_code, 200)
-
- # List domain workspaces
- workspaces = fca.list_domain_workspaces(domain.id, workspace_objects=True)
- self.assertGreater(len(workspaces), 0)
- workspaces_ids = [w.id for w in workspaces]
- self.assertIn(ws.id, workspaces_ids)
-
- # Unassign domain workspaces by ids
- status_code = fca.unassign_domain_workspaces_by_ids(domain.id, [ws.id])
- self.assertEqual(status_code, 200)
-
- workspaces = fca.list_domain_workspaces(domain.id)
- self.assertEqual(len(workspaces), 0)
-
- # Assign domain workspaces by capacities
- status_code = fca.assign_domain_workspaces_by_capacities(domain.id, [cap.id])
- self.assertEqual(status_code, 202)
-
- workspaces = fca.list_domain_workspaces(domain.id, workspace_objects=True)
- self.assertGreater(len(workspaces), 0)
- workspaces_ids = [w.id for w in workspaces]
- self.assertIn(ws.id, workspaces_ids)
-
- # Unassign all domain workspaces
- status_code = fca.unassign_all_domain_workspaces(domain.id)
- self.assertEqual(status_code, 200)
-
- workspaces = fca.list_domain_workspaces(domain.id)
- self.assertEqual(len(workspaces), 0)
-
- # Assign domain workspaces by principals
- status_code = fca.assign_domains_workspaces_by_principals(domain.id, [principal], wait_for_completion=True)
-
- self.assertEqual(status_code, 202)
-
- workspaces = fca.list_domain_workspaces(domain.id, workspace_objects=True)
- self.assertGreater(len(workspaces), 0)
- workspaces_ids = [w.id for w in workspaces]
- self.assertIn(ws.id, workspaces_ids)
-
- # Role assignments bulk assign
-
- principal_2 = {'id': 'e0505016-ef55-4ca7-b106-e085cc201823', 'type': 'User'}
- principals = [principal, principal_2]
-
- status_code = fca.role_assignments_bulk_assign(domain.id, "Contributors", principals)
-
- self.assertEqual(status_code, 200)
-
- # Role assignments bulk unassign
- status_code = fca.role_assignments_bulk_unassign(domain.id, "Contributors", [principal_2])
-
- self.assertEqual(status_code, 200)
-
- # Delete domain
- status_code = fca.delete_domain(domain.id)
-
- self.assertEqual(status_code, 200)
-
- domains = fca.list_domains()
- domains_ids = [d.id for d in domains]
- self.assertNotIn(domain.id, domains_ids)
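For completeness, the admin-side domain lifecycle that test_domains.py exercised, condensed into a sketch; the workspace name is a placeholder, and the expected status codes follow the assertions in the removed test:

```python
# Domain lifecycle via FabricClientAdmin, condensed from the removed test
# (sketch; "my-workspace" is a placeholder, not a value from the diff).
from dotenv import load_dotenv
from msfabricpysdkcore import FabricClientCore, FabricClientAdmin

load_dotenv()
fcc = FabricClientCore()
fca = FabricClientAdmin()

ws = fcc.get_workspace_by_name("my-workspace")                 # placeholder workspace
domain = fca.create_domain(display_name="sdk_demo_domain")

fca.update_domain(domain.id, display_name="sdk_demo_domain2", return_item=True)
fca.assign_domain_workspaces_by_ids(domain.id, [ws.id])        # test expects 200
fca.list_domain_workspaces(domain.id, workspace_objects=True)  # should contain ws.id
fca.unassign_all_domain_workspaces(domain.id)                  # test expects 200
fca.delete_domain(domain.id)                                   # test expects 200
```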