msfabricpysdkcore 0.2.7__py3-none-any.whl → 0.2.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- msfabricpysdkcore/coreapi.py +6 -6
- msfabricpysdkcore/tests/__init__.py +0 -0
- msfabricpysdkcore/tests/test_admin_apis.py +174 -0
- msfabricpysdkcore/tests/test_admin_tags.py +46 -0
- msfabricpysdkcore/tests/test_apache_airflow_job.py +60 -0
- msfabricpysdkcore/tests/test_connection.py +111 -0
- msfabricpysdkcore/tests/test_copy_jobs.py +60 -0
- msfabricpysdkcore/tests/test_dataflows.py +60 -0
- msfabricpysdkcore/tests/test_datapipelines.py +60 -0
- msfabricpysdkcore/tests/test_deployment_pipelinev2.py +135 -0
- msfabricpysdkcore/tests/test_digital_twin_builder.py +60 -0
- msfabricpysdkcore/tests/test_domains.py +119 -0
- msfabricpysdkcore/tests/test_environments.py +121 -0
- msfabricpysdkcore/tests/test_evenstreams.py +57 -0
- msfabricpysdkcore/tests/test_eventhouses.py +63 -0
- msfabricpysdkcore/tests/test_eventstream_topology.py +82 -0
- msfabricpysdkcore/tests/test_external_data_shares.py +51 -0
- msfabricpysdkcore/tests/test_fabric_azure_client.py +80 -0
- msfabricpysdkcore/tests/test_folders.py +56 -0
- msfabricpysdkcore/tests/test_gateways.py +99 -0
- msfabricpysdkcore/tests/test_git.py +66 -0
- msfabricpysdkcore/tests/test_graphqlapi.py +44 -0
- msfabricpysdkcore/tests/test_items.py +97 -0
- msfabricpysdkcore/tests/test_jobs.py +98 -0
- msfabricpysdkcore/tests/test_kql_dashboards.py +63 -0
- msfabricpysdkcore/tests/test_kql_queryset.py +60 -0
- msfabricpysdkcore/tests/test_kqldatabases.py +56 -0
- msfabricpysdkcore/tests/test_lakehouse.py +89 -0
- msfabricpysdkcore/tests/test_managed_private_endpoints.py +62 -0
- msfabricpysdkcore/tests/test_mirrored_azuredatabricks_catalog.py +81 -0
- msfabricpysdkcore/tests/test_mirroreddatabases.py +80 -0
- msfabricpysdkcore/tests/test_ml_experiments.py +45 -0
- msfabricpysdkcore/tests/test_ml_models.py +46 -0
- msfabricpysdkcore/tests/test_mounted_adf.py +64 -0
- msfabricpysdkcore/tests/test_notebooks.py +58 -0
- msfabricpysdkcore/tests/test_one_lake_data_access_security.py +63 -0
- msfabricpysdkcore/tests/test_other_items.py +45 -0
- msfabricpysdkcore/tests/test_reflex.py +56 -0
- msfabricpysdkcore/tests/test_reports.py +56 -0
- msfabricpysdkcore/tests/test_semantic_model.py +56 -0
- msfabricpysdkcore/tests/test_shortcuts.py +59 -0
- msfabricpysdkcore/tests/test_spark.py +91 -0
- msfabricpysdkcore/tests/test_sparkjobdefinition.py +55 -0
- msfabricpysdkcore/tests/test_sql_endpoint.py +28 -0
- msfabricpysdkcore/tests/test_sqldatabases.py +45 -0
- msfabricpysdkcore/tests/test_tags.py +28 -0
- msfabricpysdkcore/tests/test_variable_libary.py +61 -0
- msfabricpysdkcore/tests/test_warehouses.py +50 -0
- msfabricpysdkcore/tests/test_workspaces_capacities.py +159 -0
- msfabricpysdkcore/workspace.py +22 -4
- {msfabricpysdkcore-0.2.7.dist-info → msfabricpysdkcore-0.2.8.dist-info}/METADATA +1 -1
- msfabricpysdkcore-0.2.8.dist-info/RECORD +78 -0
- msfabricpysdkcore-0.2.7.dist-info/RECORD +0 -30
- {msfabricpysdkcore-0.2.7.dist-info → msfabricpysdkcore-0.2.8.dist-info}/WHEEL +0 -0
- {msfabricpysdkcore-0.2.7.dist-info → msfabricpysdkcore-0.2.8.dist-info}/licenses/LICENSE +0 -0
- {msfabricpysdkcore-0.2.7.dist-info → msfabricpysdkcore-0.2.8.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,51 @@
|
|
1
|
+
import unittest
from dotenv import load_dotenv
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()

class TestFabricClientCore(unittest.TestCase):
    """Integration tests for the external data share lifecycle on an item."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()

    def test_external_data_shares(self):
        """Create, fetch, list, revoke and re-fetch an external data share."""
        fc = self.fc

        workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
        item_id = "82c01e0c-4cee-4a62-9806-870699ced699"

        recipient = {
            "userPrincipalName": "lisa4@fabrikam.com"
        }
        paths = ["Files/to_share"]

        # Create a share and make sure the service handed back an id.
        created = fc.create_external_data_share(workspace_id, item_id, paths, recipient)
        self.assertIsNotNone(created)
        self.assertIn('id', created)

        fetched = fc.get_external_data_share(workspace_id, item_id, created['id'])
        self.assertIsNotNone(fetched)
        self.assertEqual(fetched['id'], created['id'])

        shares = fc.list_external_data_shares_in_item(workspace_id, item_id)
        self.assertGreater(len(shares), 0)
        self.assertIn(fetched['id'], [share['id'] for share in shares])

        # Revoking returns the raw HTTP status code.
        self.assertEqual(fc.revoke_external_data_share(workspace_id, item_id, fetched['id']), 200)

        # A revoked share remains readable but reports the 'Revoked' status.
        revoked = fc.get_external_data_share(workspace_id, item_id, fetched['id'])
        self.assertIsNotNone(revoked)
        self.assertEqual(fetched['id'], revoked['id'])
        self.assertEqual(revoked['status'], 'Revoked')
|
50
|
+
|
51
|
+
|
@@ -0,0 +1,80 @@
|
|
1
|
+
import unittest
from dotenv import load_dotenv
from datetime import datetime
from time import sleep
from msfabricpysdkcore import FabricAzureClient

load_dotenv()


class TestFabricClientCore(unittest.TestCase):
    """Integration tests for Azure-side Fabric capacity management."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fac = FabricAzureClient()

    def test_azure_capacity(self):
        """Exercise capacity create/get/delete/list/resume/suspend/update."""
        fac = self.fac

        subscription_id = "c77cc8fc-43bb-4d44-bdc5-6e20511ed2a8"
        resource_group_name = "fabricdemo"
        capacity_name = "westeuropeajrederer"
        capacity_name_new = "westeuropeajrederer" + datetime.now().strftime("%Y%m%d%H%M%S")

        # The freshly generated, timestamped name must not be taken yet.
        availability = fac.check_name_availability(subscription_id, "westeurope", capacity_name_new)
        self.assertIn('nameAvailable', availability)
        self.assertEqual(availability['nameAvailable'], True)

        created = fac.create_or_update_capacity(
            subscription_id, resource_group_name, capacity_name_new,
            location="westeurope",
            properties_administration={"members": ['admin@MngEnvMCAP065039.onmicrosoft.com']},
            sku="F2")
        self.assertIsNotNone(created.name)
        self.assertEqual(created.name, capacity_name_new)

        fetched = fac.get_capacity(subscription_id, resource_group_name, capacity_name_new)
        self.assertIsNotNone(fetched.name)
        self.assertEqual(fetched.name, capacity_name_new)

        # NOTE(review): this sku comes from the temporary capacity (created as
        # "F2"), not from the permanent one updated below — confirm intent.
        sku = fetched.sku['name']

        # Give the service time to finish provisioning before deleting.
        sleep(60)

        deletion = fac.delete_capacity(subscription_id, resource_group_name, capacity_name_new)
        self.assertEqual(deletion.status_code, 202)

        # The permanent capacity should still show up in both listings.
        listed = fac.list_by_resource_group(subscription_id, resource_group_name)
        self.assertIn(capacity_name, [cap["name"] for cap in listed])

        listed = fac.list_by_subscription(subscription_id)
        self.assertIn(capacity_name, [cap["name"] for cap in listed])

        skus = fac.list_skus(subscription_id)
        self.assertGreater(len(skus), 0, msg=f"No SKUs found: {skus}")

        skus = fac.list_skus_for_capacity(subscription_id, resource_group_name, capacity_name)
        self.assertGreater(len(skus), 0, msg=f"No SKUs found: {skus}")

        resumed = fac.resume_capacity(subscription_id, resource_group_name, capacity_name)
        self.assertEqual(resumed.status_code, 202)

        sleep(60)
        suspended = fac.suspend_capacity(subscription_id, resource_group_name, capacity_name)
        self.assertEqual(suspended.status_code, 202)
        sleep(180)

        # Toggle the SKU so consecutive runs exercise the update path.
        if sku != "F4":
            updated = fac.update_capacity(subscription_id, resource_group_name, capacity_name, sku="F4")
            self.assertEqual(updated.sku["name"], "F4")
        else:
            updated = fac.update_capacity(subscription_id, resource_group_name, capacity_name, sku="F2")
            self.assertEqual(updated.sku["name"], "F2")
|
@@ -0,0 +1,56 @@
|
|
1
|
+
import unittest
from dotenv import load_dotenv
from msfabricpysdkcore import FabricClientCore
from datetime import datetime

load_dotenv()

class TestFabricClientCore(unittest.TestCase):
    """Integration tests for workspace folder CRUD and move operations."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fcc = FabricClientCore()

    def test_folders(self):
        """Create, fetch, list, rename, move and delete workspace folders."""
        fcc = self.fcc

        workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
        parent_id = "d4f3a9fb-6975-4f5c-9c6b-ca205280966f"
        stamp = datetime.now().strftime("%Y%m%d%H%M%S")
        folder_name = f"sdk_folder_{stamp}"

        # Create a timestamped sub-folder under the well-known parent folder.
        created = fcc.create_folder(workspace_id=workspace_id, display_name=folder_name, parent_folder_id=parent_id)
        self.assertIsNotNone(created)
        self.assertEqual(created.display_name, folder_name)

        fetched = fcc.get_folder(workspace_id=workspace_id, folder_id=created.id)
        self.assertEqual(created.id, fetched.id)

        matching = [f for f in fcc.list_folders(workspace_id=workspace_id) if f.display_name == folder_name]
        self.assertGreater(len(matching), 0)

        renamed_name = f"sdk_sub_folder_updated_{stamp}"
        renamed = fcc.update_folder(workspace_id=workspace_id, folder_id=created.id, display_name=renamed_name)
        self.assertEqual(renamed.display_name, renamed_name)

        # Moving without a target parent relocates the folder to the workspace
        # root, which reports an empty parent_folder_id.
        moved = fcc.move_folder(workspace_id=workspace_id, folder_id=renamed.id)
        self.assertEqual(moved.display_name, renamed_name)
        self.assertEqual(moved.parent_folder_id, "")

        # Clean up everything except the permanent "sdk_folder" fixture.
        for f in fcc.list_folders(workspace_id=workspace_id):
            if f.display_name != "sdk_folder":
                f.delete()

        self.assertEqual(len(fcc.list_folders(workspace_id=workspace_id)), 1)
|
48
|
+
|
49
|
+
|
50
|
+
|
51
|
+
|
52
|
+
|
53
|
+
|
54
|
+
|
55
|
+
|
56
|
+
|
@@ -0,0 +1,99 @@
|
|
1
|
+
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()

class TestFabricClientCore(unittest.TestCase):
    """Integration tests for gateway CRUD, role assignments and members."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()

    def test_gateways(self):
        """Create a VNet gateway, manage role assignments and members, clean up."""
        fc = self.fc

        # Remove leftover virtual-network gateways from earlier runs; remember
        # the gateway that exposes a public key (the on-premises one).
        gateways = fc.list_gateways()
        for gw in gateways:
            if "publicKey" not in gw:
                fc.delete_gateway(gw['id'])
            if "publicKey" in gw:
                gw_id = gw['id']

        datetime_str = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
        display_name = 'fabricvnet-' + datetime_str
        gwr = {'displayName': display_name,
               'capacityId': '9e7e757d-d567-4fb3-bc4f-d230aabf2a00',
               'virtualNetworkAzureResource': {'virtualNetworkName': 'fabricvnet',
                                               'subnetName': 'default2',
                                               'resourceGroupName': 'fabricdemo',
                                               'subscriptionId': 'c77cc8fc-43bb-4d44-bdc5-6e20511ed2a8'},
               'inactivityMinutesBeforeSleep': 30,
               'numberOfMemberGateways': 2,
               'type': 'VirtualNetwork'}

        gw = fc.create_gateway(gwr)
        self.assertEqual(gw['displayName'], gwr['displayName'])

        # Exactly two gateways now: the on-premises one and the new VNet one.
        gateways = fc.list_gateways()
        self.assertEqual(len(gateways), 2)

        gateways = [g for g in gateways if g['displayName'] == display_name]
        gw = gateways[0]

        ras = fc.list_gateway_role_assignments(gw['id'])
        self.assertEqual(len(ras), 1)

        principal = {"id": "755f273c-98f8-408c-a886-691794938bd8",
                     "type": "ServicePrincipal"}

        new_ras = fc.add_gateway_role_assignment(gw['id'], principal, 'ConnectionCreator')
        self.assertIn("id", new_ras)
        #self.assertEqual(2, len(fc.list_gateway_role_assignments(gw['id'])))

        new_ras = fc.update_gateway_role_assignment(gw['id'], new_ras['id'], 'Admin')
        self.assertEqual('Admin', new_ras['role'])

        new_ras_ = fc.get_gateway_role_assignment(gw['id'], new_ras['id'])
        self.assertEqual('Admin', new_ras_['role'])
        self.assertEqual(new_ras['id'], new_ras_['id'])

        resp_code = fc.delete_gateway_role_assignment(gw['id'], new_ras['id'])
        self.assertEqual(200, resp_code)
        self.assertEqual(1, len(fc.list_gateway_role_assignments(gw['id'])))

        gw_members = fc.list_gateway_members(gw_id)
        self.assertGreater(len(gw_members), 0)
        self.assertIn('id', gw_members[0])
        self.assertIn('displayName', gw_members[0])

        display_name_member = "surface_desktop" + datetime_str

        # BUG FIX: the original passed the gateway id as gateway_member_id; the
        # update must target an actual member returned by list_gateway_members.
        gw_member = fc.update_gateway_member(gateway_id=gw_id,
                                             gateway_member_id=gw_members[0]['id'],
                                             display_name=display_name_member,
                                             enabled=True)
        self.assertEqual(display_name_member, gw_member['displayName'])

        gw_ = fc.get_gateway(gw["id"])
        self.assertEqual(display_name, gw_['displayName'])

        gwr = {
            "type": "OnPremises",
            "displayName": display_name,
            "loadBalancingSetting": "Failover",
            "allowCloudConnectionRefresh": False,
            "allowCustomConnectors": False
        }

        gw_ = fc.update_gateway(gw_id, gwr)
        self.assertEqual(display_name, gw_['displayName'])

        resp_code = fc.delete_gateway(gw["id"])
        self.assertEqual(200, resp_code)

        self.assertEqual(len(fc.list_gateways()), 1)
|
@@ -0,0 +1,66 @@
|
|
1
|
+
import unittest
from msfabricpysdkcore.coreapi import FabricClientCore
from datetime import datetime
from dotenv import load_dotenv

load_dotenv()

class TestFabricClientCore(unittest.TestCase):
    """Integration tests for workspace Git integration (Azure DevOps)."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()

    def test_git(self):
        """Connect a fresh workspace to Git, sync from it, then disconnect."""
        suffix = datetime.now().strftime("%Y%m%d%H%M%S")
        workspace_name = "git" + suffix
        self.fc.create_workspace(display_name=workspace_name)
        workspace = self.fc.get_workspace_by_name(name=workspace_name)
        self.fc.assign_to_capacity(workspace_id=workspace.id,
                                   capacity_id="9e7e757d-d567-4fb3-bc4f-d230aabf2a00")

        git_provider_details = {'organizationName': 'MngEnvMCAP065039',
                                'projectName': 'fabricdevops',
                                'gitProviderType': 'AzureDevOps',
                                'repositoryName': 'fabricdevops',
                                'branchName': 'main',
                                'directoryName': '/sdkdemoRTI'}

        connect_status = self.fc.git_connect(workspace_id=workspace.id,
                                             git_provider_details=git_provider_details)
        self.assertEqual(connect_status, 200)

        init_status = self.fc.git_initialize_connection(workspace_id=workspace.id,
                                                        initialization_strategy="PreferWorkspace")
        self.assertEqual(init_status, 200)

        connection_details = self.fc.git_get_connection(workspace_id=workspace.id)
        self.assertEqual(connection_details['gitConnectionState'], 'ConnectedAndInitialized')

        status = self.fc.git_get_status(workspace_id=workspace.id)
        self.assertTrue(len(status["changes"]) > 0)

        git_credentials = self.fc.get_my_git_credentials('e624ffea-990e-482c-b27c-4ed5adae73c6')
        self.assertTrue(git_credentials["source"] == "Automatic")

        update_status = self.fc.update_from_git(workspace_id=workspace.id,
                                                remote_commit_hash=status["remoteCommitHash"])
        self.assertEqual(update_status, 202)

        # The repository is expected to contain a lakehouse named "blubb".
        found_blubb = any(item.type == "Lakehouse" and item.display_name == "blubb"
                          for item in workspace.list_items())
        self.assertTrue(found_blubb)

        disconnect_status = self.fc.git_disconnect(workspace_id=workspace.id)
        self.assertEqual(disconnect_status, 200)

        workspace.delete()

if __name__ == "__main__":
    unittest.main()
|
@@ -0,0 +1,44 @@
|
|
1
|
+
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()

class TestFabricClientCore(unittest.TestCase):
    """Integration tests for GraphQL API item CRUD."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()

    def test_graphql_api(self):
        """Create, list, fetch, rename and delete a GraphQL API item."""
        fc = self.fc
        workspace_id = '46425c13-5736-4285-972c-6d034020f3ff'

        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")

        graph_ql = fc.create_graphql_api(workspace_id, display_name="graphql" + datetime_str)
        self.assertEqual(graph_ql.display_name, "graphql" + datetime_str)

        graph_qls = fc.list_graphql_apis(workspace_id)
        graph_ql_names = [gql.display_name for gql in graph_qls]
        self.assertGreater(len(graph_qls), 0)
        self.assertIn("graphql" + datetime_str, graph_ql_names)

        gql = fc.get_graphql_api(workspace_id, graphql_api_name="graphql" + datetime_str)
        self.assertIsNotNone(gql.id)
        self.assertEqual(gql.display_name, "graphql" + datetime_str)

        # The updated item is re-fetched below, so the return value (previously
        # bound to an unused local) is deliberately discarded.
        fc.update_graphql_api(workspace_id, gql.id, display_name=f"graphql{datetime_str}2", return_item=True)

        gql = fc.get_graphql_api(workspace_id, graphql_api_id=gql.id)
        self.assertEqual(gql.display_name, f"graphql{datetime_str}2")

        status_code = fc.delete_graphql_api(workspace_id, gql.id)
        self.assertEqual(status_code, 200)
|
@@ -0,0 +1,97 @@
|
|
1
|
+
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()

class TestFabricClientCore(unittest.TestCase):
    """Integration tests for generic item CRUD, definitions and connections."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()
        self.workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'

        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        self.item_name = "testitem" + datetime_str
        self.item_type = "Notebook"

    def test_item_end_to_end(self):
        """Create, get (by id and by name), list, update and delete an item."""
        item = self.fc.create_item(display_name=self.item_name, type=self.item_type, workspace_id=self.workspace_id)
        self.assertEqual(item.display_name, self.item_name)
        self.assertEqual(item.type, self.item_type)
        self.assertEqual(item.workspace_id, self.workspace_id)
        self.assertEqual(item.description, "")

        item = self.fc.get_item(workspace_id=self.workspace_id, item_id=item.id)
        item_ = self.fc.get_item(workspace_id=self.workspace_id,
                                 item_name=self.item_name, item_type=self.item_type)
        self.assertEqual(item.id, item_.id)
        self.assertEqual(item.display_name, self.item_name)
        self.assertEqual(item.type, self.item_type)
        self.assertEqual(item.workspace_id, self.workspace_id)
        self.assertEqual(item.description, "")

        item_list = self.fc.list_items(workspace_id=self.workspace_id)
        self.assertTrue(len(item_list) > 0)

        item_ids = [item_.id for item_ in item_list]
        self.assertIn(item.id, item_ids)

        self.fc.update_item(workspace_id=self.workspace_id, item_id=item.id,
                            display_name=f"u{self.item_name}", return_item=True)
        item = self.fc.get_item(workspace_id=self.workspace_id, item_id=item.id)
        self.assertEqual(item.display_name, f"u{self.item_name}")

        status_code = self.fc.delete_item(workspace_id=self.workspace_id, item_id=item.id)

        # BUG FIX: assertAlmostEqual is for numeric closeness; an HTTP status
        # code must match exactly.
        self.assertEqual(status_code, 200)

    def test_item_definition(self):
        """Copy a SparkJobDefinition definition into freshly created items."""
        sjd = self.fc.get_item(workspace_id=self.workspace_id, item_name="helloworld", item_type="SparkJobDefinition")
        self.assertIsNotNone(sjd.definition)
        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        # Distinct names for the display strings (the original reused the same
        # variables for the created items, shadowing the name strings).
        name2 = "blubb2" + datetime_str
        name3 = "blubb3" + datetime_str
        # blubb2: definition supplied at creation time.
        blubb2 = self.fc.create_item(display_name=name2, type="SparkJobDefinition",
                                     workspace_id=self.workspace_id, definition=sjd.definition)

        # blubb3: created empty, definition applied afterwards.
        blubb3 = self.fc.create_item(display_name=name3, type="SparkJobDefinition", workspace_id=self.workspace_id)

        response = self.fc.update_item_definition(workspace_id=self.workspace_id,
                                                  item_id=blubb3.id, definition=sjd.definition)

        self.assertEqual(response.status_code, 200)
        blubb3 = self.fc.get_item(workspace_id=self.workspace_id, item_id=blubb3.id)

        self.assertIn("parts", blubb3.definition)

        self.assertEqual(len(blubb3.definition["parts"]), len(sjd.definition["parts"]))
        sjd_definition = [part["path"] for part in sjd.definition["parts"] if part["path"] == "SparkJobDefinitionV1.json"]
        blubb3_definition = [part["path"] for part in blubb3.definition["parts"] if part["path"] == "SparkJobDefinitionV1.json"]
        self.assertEqual(sjd_definition, blubb3_definition)

        self.assertNotEqual(blubb2.id, sjd.id)
        self.assertIn("parts", blubb2.definition)

        self.assertEqual(len(blubb2.definition["parts"]), len(sjd.definition["parts"]))
        sjd_definition = [part["path"] for part in sjd.definition["parts"] if part["path"] == "SparkJobDefinitionV1.json"]
        blubb2_definition = [part["path"] for part in blubb2.definition["parts"] if part["path"] == "SparkJobDefinitionV1.json"]
        self.assertEqual(sjd_definition, blubb2_definition)
        self.assertNotEqual(blubb2.id, blubb3.id)

        blubb2.delete()
        blubb3.delete()

    def test_item_connections(self):
        """The fixture item is expected to have no connections."""
        fc = self.fc
        connections = fc.list_item_connections(workspace_id='05bc5baa-ef02-4a31-ab20-158a478151d3',
                                               item_id='82c01e0c-4cee-4a62-9806-870699ced699')
        self.assertEqual(len(connections), 0)

if __name__ == "__main__":
    unittest.main()
|
@@ -0,0 +1,98 @@
|
|
1
|
+
import unittest
from msfabricpysdkcore.coreapi import FabricClientCore
from datetime import datetime
from dotenv import load_dotenv

load_dotenv()


class TestFabricClientCore(unittest.TestCase):
    """Integration tests for on-demand job runs and item schedules."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()
        self.workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
        self.item_id = "9a2edf0f-2318-4179-80d0-1002f3dae7b1"

    def test_jobs_end_to_end(self):
        """Start a notebook run, fetch it, cancel it, and list job instances."""
        run = self.fc.run_on_demand_item_job(workspace_id=self.workspace_id,
                                             item_id=self.item_id,
                                             job_type="RunNotebook")

        self.assertEqual(run.item_id, self.item_id)
        self.assertEqual(run.workspace_id, self.workspace_id)
        self.assertEqual(run.job_type, "RunNotebook")
        self.assertIn(run.status, ["NotStarted", "InProgress", "Failed"])
        self.assertEqual(run.invoke_type, "Manual")

        same_run = self.fc.get_item_job_instance(workspace_id=self.workspace_id,
                                                 item_id=self.item_id,
                                                 job_instance_id=run.id)
        self.assertEqual(run.id, same_run.id)

        cancel_status = self.fc.cancel_item_job_instance(workspace_id=self.workspace_id,
                                                         item_id=self.item_id,
                                                         job_instance_id=run.id)
        self.assertEqual(cancel_status, 202)

        instances = self.fc.list_item_job_instances(workspace_id=self.workspace_id,
                                                    item_id=self.item_id)
        self.assertGreaterEqual(len(instances), 1)

    def test_item_schedules(self):
        """Create/read/update/list schedules on a temporary SparkJobDefinition."""
        fc = self.fc
        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'

        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        sjd_name = f"sjd{datetime_str}"

        # Clone the definition of the existing "helloworld" spark job.
        template = fc.get_spark_job_definition(workspace_id, spark_job_definition_name="helloworld")
        definition = fc.get_spark_job_definition_definition(workspace_id, template.id)

        self.assertIsNotNone(definition)
        self.assertIn("definition", definition)
        definition = definition["definition"]

        sjd = fc.create_spark_job_definition(workspace_id, display_name=sjd_name, definition=definition)
        self.assertIsNotNone(sjd)

        configuration = {'type': 'Daily',
                         'startDateTime': '2024-11-21T00:00:00',
                         'endDateTime': '2028-11-08T23:59:00',
                         'localTimeZoneId': 'Romance Standard Time',
                         'times': ['15:39']}

        schedule = sjd.create_item_schedule(job_type="sparkjob", configuration=configuration, enabled=True)

        schedule_id = schedule["id"]
        fetched = sjd.get_item_schedule(schedule_id=schedule_id, job_type="sparkjob")
        self.assertIsNotNone(fetched)
        self.assertEqual(fetched["id"], schedule_id)

        updated = sjd.update_item_schedule(schedule_id=schedule_id, job_type="sparkjob",
                                           configuration=configuration, enabled=False)
        self.assertIsNotNone(updated)

        # Re-fetch to confirm the enabled flag was persisted server-side.
        fetched = sjd.get_item_schedule(schedule_id=schedule_id, job_type="sparkjob")
        self.assertEqual(fetched["id"], schedule_id)
        self.assertFalse(fetched["enabled"])
        schedules = fc.list_item_schedules(workspace_id, sjd.id, job_type="sparkjob")

        self.assertGreater(len(schedules), 0)

        sjd.delete()

if __name__ == "__main__":
    unittest.main()
|
98
|
+
|
@@ -0,0 +1,63 @@
|
|
1
|
+
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()

class TestFabricClientCore(unittest.TestCase):
    """Integration tests for KQL dashboard CRUD and definition updates."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()

    def test_kql_dashboards(self):
        """Create a dashboard, copy a definition onto it, rename and delete it."""
        stamp = datetime.now().strftime("%Y%m%d%H%M%S")
        fc = self.fc
        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'

        # Reference dashboard whose definition is copied onto the new one.
        template = fc.get_kql_dashboard(workspace_id, kql_dashboard_name='dashboard1')
        template_id = template.id

        dash_name = "testdash" + stamp

        dash = fc.create_kql_dashboard(display_name=dash_name, workspace_id=workspace_id)
        self.assertEqual(dash.display_name, dash_name)

        template_definition = fc.get_kql_dashboard_definition(workspace_id, template_id)
        template_definition = template_definition["definition"]
        self.assertIsNotNone(template_definition)

        update_result = fc.update_kql_dashboard_definition(workspace_id, dash.id,
                                                           definition=template_definition)
        self.assertIsNotNone(update_result)

        dashboards = fc.list_kql_dashboards(workspace_id)
        names = [d.display_name for d in dashboards]
        self.assertGreater(len(dashboards), 0)
        self.assertIn(dash_name, names)
        self.assertIn('dashboard1', names)

        dash_by_name = fc.get_kql_dashboard(workspace_id, kql_dashboard_name=dash_name)
        self.assertIsNotNone(dash_by_name.id)
        self.assertEqual(dash_by_name.display_name, dash_name)

        new_name = dash_name + "2"
        renamed = fc.update_kql_dashboard(workspace_id, dash.id, display_name=new_name, return_item=True)

        self.assertEqual(renamed.display_name, new_name)
        self.assertEqual(dash.id, renamed.id)

        delete_status = fc.delete_kql_dashboard(workspace_id, renamed.id)
        self.assertEqual(delete_status, 200)

        names = [d.display_name for d in fc.list_kql_dashboards(workspace_id)]
        self.assertNotIn(renamed.display_name, names)
|
63
|
+
|