matlab-proxy 0.15.0__py3-none-any.whl → 0.16.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of matlab-proxy might be problematic. Click here for more details.

Files changed (41)
  1. matlab_proxy/app.py +13 -7
  2. matlab_proxy/app_state.py +9 -6
  3. matlab_proxy/constants.py +1 -0
  4. matlab_proxy/gui/asset-manifest.json +3 -3
  5. matlab_proxy/gui/index.html +1 -1
  6. matlab_proxy/gui/static/js/{main.14aa7840.js → main.522d83ba.js} +3 -3
  7. matlab_proxy/gui/static/js/main.522d83ba.js.map +1 -0
  8. matlab_proxy/settings.py +7 -4
  9. matlab_proxy/util/__init__.py +8 -1
  10. matlab_proxy/util/mwi/token_auth.py +19 -5
  11. {matlab_proxy-0.15.0.dist-info → matlab_proxy-0.16.0.dist-info}/METADATA +1 -1
  12. {matlab_proxy-0.15.0.dist-info → matlab_proxy-0.16.0.dist-info}/RECORD +40 -20
  13. tests/integration/integration_tests_with_license/test_http_end_points.py +4 -4
  14. tests/integration/integration_tests_without_license/conftest.py +4 -3
  15. tests/integration/integration_tests_without_license/test_matlab_is_down_if_unlicensed.py +3 -0
  16. tests/unit/__init__.py +1 -0
  17. tests/unit/conftest.py +67 -0
  18. tests/unit/test_app.py +1113 -0
  19. tests/unit/test_app_state.py +586 -0
  20. tests/unit/test_constants.py +6 -0
  21. tests/unit/test_ddux.py +22 -0
  22. tests/unit/test_devel.py +246 -0
  23. tests/unit/test_non_dev_mode.py +169 -0
  24. tests/unit/test_settings.py +460 -0
  25. tests/unit/util/__init__.py +3 -0
  26. tests/unit/util/mwi/__init__.py +1 -0
  27. tests/unit/util/mwi/embedded_connector/__init__.py +1 -0
  28. tests/unit/util/mwi/embedded_connector/test_helpers.py +29 -0
  29. tests/unit/util/mwi/embedded_connector/test_request.py +64 -0
  30. tests/unit/util/mwi/test_custom_http_headers.py +281 -0
  31. tests/unit/util/mwi/test_logger.py +49 -0
  32. tests/unit/util/mwi/test_token_auth.py +303 -0
  33. tests/unit/util/mwi/test_validators.py +331 -0
  34. tests/unit/util/test_mw.py +550 -0
  35. tests/unit/util/test_util.py +135 -0
  36. matlab_proxy/gui/static/js/main.14aa7840.js.map +0 -1
  37. /matlab_proxy/gui/static/js/{main.14aa7840.js.LICENSE.txt → main.522d83ba.js.LICENSE.txt} +0 -0
  38. {matlab_proxy-0.15.0.dist-info → matlab_proxy-0.16.0.dist-info}/LICENSE.md +0 -0
  39. {matlab_proxy-0.15.0.dist-info → matlab_proxy-0.16.0.dist-info}/WHEEL +0 -0
  40. {matlab_proxy-0.15.0.dist-info → matlab_proxy-0.16.0.dist-info}/entry_points.txt +0 -0
  41. {matlab_proxy-0.15.0.dist-info → matlab_proxy-0.16.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,550 @@
1
+ # Copyright 2020-2023 The MathWorks, Inc.
2
+
3
+ import datetime
4
+ import random
5
+ import re
6
+ import secrets
7
+ from collections import namedtuple
8
+ from datetime import timedelta, timezone
9
+ from http import HTTPStatus
10
+
11
+ import pytest
12
+ from matlab_proxy import settings
13
+ from matlab_proxy.util import mw, system
14
+ from matlab_proxy.util.mwi import exceptions
15
+
16
+ """This file tests methods present in matlab_proxy/util/mw.py
17
+ """
18
+
19
+
20
@pytest.fixture(name="mwa_api_data")
def mwa_api_data_fixture():
    """Pytest fixture supplying MathWorks authentication test values.

    Returns:
        namedtuple: mwa/mhlm endpoint URLs (with compiled regex patterns),
        identity_token, source_id, access_token, matlab_release and mhlm_context.
    """
    mwa_endpoint = "https://login.mathworks.com/authenticationws/service/v4"
    mhlm_endpoint = (
        "https://licensing.mathworks.com/mls/service/v1/entitlement/list"
    )

    MwaApiVariables = namedtuple(
        "mwa_api_variables",
        [
            "mwa_api_endpoint",
            "mwa_api_endpoint_pattern",
            "mhlm_api_endpoint",
            "mhlm_api_endpoint_pattern",
            "identity_token",
            "source_id",
            "access_token",
            "matlab_release",
            "mhlm_context",
        ],
    )

    return MwaApiVariables(
        mwa_api_endpoint=mwa_endpoint,
        mwa_api_endpoint_pattern=re.compile("^" + mwa_endpoint),
        mhlm_api_endpoint=mhlm_endpoint,
        mhlm_api_endpoint_pattern=re.compile("^" + mhlm_endpoint),
        identity_token=secrets.token_urlsafe(324),
        source_id=secrets.token_urlsafe(21),
        access_token=secrets.token_urlsafe(22),
        matlab_release="R2020b",
        mhlm_context="jupyter",
    )
71
+
72
+
73
@pytest.fixture(name="fetch_access_token_valid_json")
def fetch_access_token_valid_json_fixture():
    """Pytest fixture which returns a dict.

    This fixture returns a dict representing a valid json response from mhlm servers.

    Returns:
        dict : A dictionary containing Key-value pairs present in a valid json response from mhlm servers.
    """

    now = datetime.datetime.now(timezone.utc)
    authentication_date = str(now.strftime("%Y-%m-%dT%H:%M:%S.%f%z"))
    expiration_date = str((now + timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.%f%z"))

    login_identifier = "abc@mathworks.com"
    # randint(0, 9) yields exactly one digit per draw. The previous upper
    # bound of 10 could emit the two-character string "10", making these
    # "fixed-width" IDs longer than intended.
    reference_ID = int("".join([str(random.randint(0, 9)) for _ in range(8)]))

    first_name = "abc"
    last_name = "def"
    user_id = int("".join([str(random.randint(0, 9)) for _ in range(13)]))

    access_token_string = int("".join([str(random.randint(0, 9)) for _ in range(272)]))

    json_data = {
        "authenticationDate": authentication_date,
        "expirationDate": expiration_date,
        "id": 0,
        "loginIdentifier": login_identifier,
        "loginIdentifierType": "MWAS",
        "referenceDetail": {
            "referenceId": str(reference_ID),
            "country": "IN",
            "email": login_identifier,
            "firstName": first_name,
            "lastName": last_name,
            "displayName": first_name,
            "sector": "None",
            "userId": "mwa" + str(user_id),
            "profilePicture": "https://www.mathworks.com/",
        },
        "referenceId": str(reference_ID),
        "referenceType": "WEBPROFILEID",
        "source": "desktop-jupyter",
        "accessTokenString": str(access_token_string),
    }

    return json_data
120
+
121
+
122
class MockResponse:
    """Minimal stand-in for an aiohttp response, usable as an async context manager.

    Args:
        ok (bool): Value for the response's ``ok`` attribute.
        payload (dict | None): JSON payload returned by ``json()``. Defaults to a
            fresh empty dict per instance.
        status (HTTPStatus): HTTP status code. Defaults to ``HTTPStatus.OK``.
        text (str): Body text returned by ``text()``.
    """

    def __init__(self, ok, payload=None, status=HTTPStatus.OK, text=""):
        # Use None as the default instead of a mutable {} literal so that
        # instances never share (and accidentally mutate) one dict object.
        self._payload = {} if payload is None else payload
        self._text = text
        self.ok = ok
        self.status = status

    async def json(self):
        """Return the canned JSON payload."""
        return self._payload

    async def text(self):
        """Return the canned body text."""
        return self._text

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc, tb):
        pass
140
+
141
+
142
async def test_fetch_access_token(mwa_api_data, fetch_access_token_valid_json, mocker):
    """Verify mw.fetch_access_token returns the token from a valid mhlm response.

    aiohttp.ClientSession.post is patched to return a canned successful response.

    Args:
        mwa_api_data (namedtuple): Fixture with MW server endpoint/credential values.
        fetch_access_token_valid_json (dict): Fixture with a valid mhlm json response.
        mocker: Built in pytest fixture which can be used to mock functions.
    """
    expected_token = fetch_access_token_valid_json["accessTokenString"]

    mocked_post = mocker.patch(
        "aiohttp.ClientSession.post",
        return_value=MockResponse(
            ok=True, payload={"accessTokenString": expected_token}
        ),
    )

    result = await mw.fetch_access_token(
        mwa_api_data.mwa_api_endpoint,
        mwa_api_data.identity_token,
        mwa_api_data.source_id,
    )

    # The first positional argument of the patched post() is the request URL.
    _, call_args, _ = mocked_post.mock_calls[0]
    assert re.match(mwa_api_data.mwa_api_endpoint_pattern, call_args[0])
    assert result["token"] == expected_token
172
+
173
+
174
async def test_fetch_access_token_licensing_error(mwa_api_data, mocker):
    """Verify mw.fetch_access_token raises OnlineLicensingError on a bad response.

    Args:
        mwa_api_data (namedtuple): Fixture with MW server endpoint/credential values.
        mocker: Built in pytest fixture which can be used to mock functions.
    """
    mocked_post = mocker.patch(
        "aiohttp.ClientSession.post",
        return_value=MockResponse(payload={}, ok=False, status=HTTPStatus.NOT_FOUND),
    )

    with pytest.raises(exceptions.OnlineLicensingError):
        await mw.fetch_access_token(
            mwa_api_data.mwa_api_endpoint,
            mwa_api_data.identity_token,
            mwa_api_data.source_id,
        )

    # Confirm the request went to the mwa endpoint.
    _, call_args, _ = mocked_post.mock_calls[0]
    assert re.match(mwa_api_data.mwa_api_endpoint_pattern, call_args[0])
199
+
200
+
201
async def test_fetch_expand_token_licensing_error(mocker, mwa_api_data):
    """Verify mw.fetch_expand_token raises OnlineLicensingError on a bad response.

    Args:
        mocker: Built in pytest fixture which can be used to mock functions.
        mwa_api_data (namedtuple): Fixture with MW server endpoint/credential values.
    """
    mocked_post = mocker.patch(
        "aiohttp.ClientSession.post",
        return_value=MockResponse(
            payload={}, ok=False, status=HTTPStatus.SERVICE_UNAVAILABLE
        ),
    )

    with pytest.raises(exceptions.OnlineLicensingError):
        await mw.fetch_expand_token(
            mwa_api_data.mwa_api_endpoint,
            mwa_api_data.identity_token,
            mwa_api_data.source_id,
        )

    # Confirm the request went to the mwa endpoint.
    _, call_args, _ = mocked_post.mock_calls[0]
    assert re.match(mwa_api_data.mwa_api_endpoint_pattern, call_args[0])
224
+
225
+
226
@pytest.fixture(name="fetch_expand_token_valid_json")
def fetch_expand_token_valid_json_fixture():
    """Pytest fixture which returns a dict.

    The return value represents a valid json response when the mw.fetch_expand_token
    function is called.

    Returns:
        dict: A dict representing a valid json response.
    """
    now = datetime.datetime.now(timezone.utc)
    expiration_date = str((now + timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.%f%z"))
    first_name = "abc"
    last_name = "def"
    # randint(0, 9) yields exactly one digit per draw. The previous upper
    # bound of 10 could emit the two-character string "10", making these
    # "fixed-width" IDs longer than intended.
    reference_ID = int("".join([str(random.randint(0, 9)) for _ in range(8)]))
    user_id = int("".join([str(random.randint(0, 9)) for _ in range(13)]))

    json_data = {
        "expirationDate": expiration_date,
        "referenceDetail": {
            "referenceId": str(reference_ID),
            "firstName": first_name,
            "lastName": last_name,
            "displayName": first_name,
            "userId": "mwa" + str(user_id),
        },
    }

    return json_data
255
+
256
+
257
async def test_fetch_expand_token(mocker, fetch_expand_token_valid_json, mwa_api_data):
    """Verify mw.fetch_expand_token returns a non-empty dict for a valid response.

    aiohttp.ClientSession.post is patched to return a canned valid json response.

    Args:
        mocker: Built in pytest fixture which can be used to mock functions.
        fetch_expand_token_valid_json (dict): Fixture with a valid server response.
        mwa_api_data (namedtuple): Fixture with MW server endpoint/credential values.
    """
    detail = fetch_expand_token_valid_json["referenceDetail"]
    payload = dict(
        expirationDate=fetch_expand_token_valid_json["expirationDate"],
        referenceDetail=dict(
            firstName=detail["firstName"],
            lastName=detail["lastName"],
            displayName=detail["displayName"],
            userId=detail["userId"],
            referenceId=detail["referenceId"],
        ),
    )

    mocked_post = mocker.patch(
        "aiohttp.ClientSession.post",
        return_value=MockResponse(payload=payload, ok=True, status=HTTPStatus.OK),
    )

    resp = await mw.fetch_expand_token(
        mwa_api_data.mwa_api_endpoint,
        mwa_api_data.identity_token,
        mwa_api_data.source_id,
    )

    _, call_args, _ = mocked_post.mock_calls[0]

    assert resp is not None and len(resp.keys()) > 0
    assert re.match(mwa_api_data.mwa_api_endpoint_pattern, call_args[0])
295
+
296
+
297
async def test_fetch_entitlements_licensing_error(mocker, mwa_api_data):
    """Verify mw.fetch_entitlements raises OnlineLicensingError on a bad response.

    Args:
        mocker: Built in pytest fixture which can be used to mock functions.
        mwa_api_data (namedtuple): Fixture with MW server endpoint/credential values.
    """
    mocked_post = mocker.patch(
        "aiohttp.ClientSession.post",
        return_value=MockResponse(
            payload={}, ok=False, status=HTTPStatus.SERVICE_UNAVAILABLE
        ),
    )

    with pytest.raises(exceptions.OnlineLicensingError):
        await mw.fetch_entitlements(
            mwa_api_data.mhlm_api_endpoint,
            mwa_api_data.access_token,
            mwa_api_data.matlab_release,
        )

    # Confirm the request went to the mhlm endpoint.
    _, call_args, _ = mocked_post.mock_calls[0]
    assert re.match(mwa_api_data.mhlm_api_endpoint_pattern, call_args[0])
322
+
323
+
324
@pytest.fixture(
    params=[
        """<?xml version="1.0" encoding="UTF-8"?>
        <describe_entitlements_response>
        </describe_entitlements_response>""",
        """<?xml version="1.0" encoding="UTF-8"?>
        <describe_entitlements_response>
        <entitlements>
        </entitlements>
        </describe_entitlements_response>""",
    ],
    ids=[
        "Invalid Entitlement : No entitlements tag",
        "Invalid Entitlement : Empty entitlements tag",
    ],
    name="invalid_entitlements",
)
def invalid_entitlements_fixture(request):
    """Parameterized pytest fixture yielding invalid entitlement XML documents.

    Args:
        request: Built-in pytest fixture carrying the current parameter.

    Returns:
        str: An XML string containing invalid entitlements.
    """
    return request.param
352
+
353
+
354
async def test_fetch_entitlements_entitlement_error(
    mocker, mwa_api_data, invalid_entitlements
):
    """Verify mw.fetch_entitlements raises EntitlementError for invalid entitlements.

    aiohttp.ClientSession.post is patched to serve the invalid entitlement XML
    as the HTTP response body.

    Args:
        mocker: Built in pytest fixture which can be used to mock functions.
        mwa_api_data (namedtuple): Fixture with MW server endpoint/credential values.
        invalid_entitlements (str): Fixture with an invalid entitlement XML string.
    """
    mocked_post = mocker.patch(
        "aiohttp.ClientSession.post",
        return_value=MockResponse(
            payload={}, ok=True, text=invalid_entitlements, status=HTTPStatus.NOT_FOUND
        ),
    )

    with pytest.raises(exceptions.EntitlementError):
        await mw.fetch_entitlements(
            mwa_api_data.mhlm_api_endpoint,
            mwa_api_data.access_token,
            mwa_api_data.matlab_release,
        )

    # Confirm the request went to the mhlm endpoint.
    _, call_args, _ = mocked_post.mock_calls[0]
    assert re.match(mwa_api_data.mhlm_api_endpoint_pattern, call_args[0])
386
+
387
+
388
@pytest.fixture(name="valid_entitlements")
def valid_entitlements_fixture():
    """Pytest fixture which returns a string representing valid entitlements.

    Returns:
        str: An XML document with a single MATLAB entitlement carrying a
        randomly generated id and license number.
    """
    # randint(0, 9) yields exactly one digit per draw. The previous upper
    # bound of 10 could emit the two-character string "10", making these
    # "fixed-width" numbers longer than intended.
    id = int("".join([str(random.randint(0, 9)) for _ in range(7)]))
    license_number = int("".join([str(random.randint(0, 9)) for _ in range(8)]))

    return """<?xml version="1.0" encoding="UTF-8"?>
    <describe_entitlements_response>
    <entitlements>
    <entitlement>
    <id>%d</id>
    <label>MATLAB</label>
    <license_number>%d</license_number>
    </entitlement>
    </entitlements>
    </describe_entitlements_response>""" % (
        id,
        license_number,
    )
409
+
410
+
411
async def test_fetch_entitlements(mocker, mwa_api_data, valid_entitlements):
    """Verify mw.fetch_entitlements returns a non-empty result for valid entitlements.

    aiohttp.ClientSession.post is patched to serve the valid entitlement XML
    as the HTTP response body.

    Args:
        mocker: Built in pytest fixture which can be used to mock functions.
        mwa_api_data (namedtuple): Fixture with MW server endpoint/credential values.
        valid_entitlements (str): Fixture with a valid entitlement XML string.
    """
    mocked_post = mocker.patch(
        "aiohttp.ClientSession.post",
        return_value=MockResponse(
            payload={}, ok=True, text=valid_entitlements, status=HTTPStatus.OK
        ),
    )

    resp = await mw.fetch_entitlements(
        mwa_api_data.mhlm_api_endpoint,
        mwa_api_data.access_token,
        mwa_api_data.matlab_release,
    )

    _, call_args, _ = mocked_post.mock_calls[0]

    assert resp is not None and len(resp) > 0
    assert re.match(mwa_api_data.mhlm_api_endpoint_pattern, call_args[0])
440
+
441
+
442
def test_parse_mhlm_no_error():
    """Verify mw.parse_mhlm_error returns None when logs hold no mhlm-specific error."""
    logs = ["Starting MATLAB proxy-app", "Error parsing config, resetting."]

    assert mw.parse_mhlm_error(logs) is None
452
+
453
+
454
def test_parse_mhlm_error():
    """Verify mw.parse_mhlm_error returns an OnlineLicensingError.

    When logs contain mhlm specific error information, the parser must produce
    an exceptions.OnlineLicensingError instance.
    """
    logs = ["License Manager Error", "MHLM Licensing Failed"]

    parsed = mw.parse_mhlm_error(logs)

    assert isinstance(parsed, exceptions.OnlineLicensingError)
464
+
465
+
466
def test_parse_nlm_no_error():
    """Verify mw.parse_nlm_error returns None for empty logs and connection string."""
    assert mw.parse_nlm_error([], "") is None
473
+
474
+
475
def test_parse_nlm_error():
    """Verify mw.parse_nlm_error produces an exception for nlm failure logs.

    When logs contain nlm specific errors, the parser must produce an
    exceptions.NetworkLicensingError instance.
    """
    logs = [
        "Starting MATLAB proxy-app",
        "License checkout failed",
        "Error parsing config, resetting.",
        "Diagnostic Information",
    ]

    parsed = mw.parse_nlm_error(logs, "123@nlm")

    assert isinstance(parsed, exceptions.NetworkLicensingError)
493
+
494
+
495
def test_parse_other_error():
    """Verify mw.parse_other_error produces a MatlabError from generic MATLAB logs."""
    logs = ["Starting MATLAB proxy-app", "Error parsing config, resetting."]

    parsed = mw.parse_other_error(logs)

    assert isinstance(parsed, exceptions.MatlabError)
505
+
506
+
507
def test_range_matlab_connector_ports():
    """Verify mw.range_matlab_connector_ports yields consecutive port numbers."""
    ports = mw.range_matlab_connector_ports()

    first, second = next(ports), next(ports)

    assert second == first + 1
516
+
517
+
518
@pytest.mark.skipif(
    not system.is_linux(),
    reason="Xvfb is only required on linux based operating systems",
)
async def test_create_xvfb_process(loop):
    """Test that multiple Xvfb processes started with -displayfd get unique displays.

    Creates 2 Xvfb processes with the '-displayfd' flag and checks that they run
    on distinct display ports.

    Args:
        loop: pytest fixture supplying an asyncio event loop.
    """
    dev_settings = settings.get(dev=True)

    # Nothing to verify if Xvfb is not installed on this machine.
    if not dev_settings["is_xvfb_available"]:
        return

    xvfb_cmd_1, pipe_1 = settings.create_xvfb_cmd()
    xvfb_cmd_2, pipe_2 = settings.create_xvfb_cmd()

    # Create Xvfb processes
    xvfb_1, display_port_1 = await mw.create_xvfb_process(xvfb_cmd_1, pipe_1, {})
    xvfb_2, display_port_2 = await mw.create_xvfb_process(xvfb_cmd_2, pipe_2, {})

    # try/finally ensures the Xvfb child processes are reaped even when an
    # assertion fails; previously a failing assert leaked both processes.
    try:
        assert xvfb_1 is not None and xvfb_2 is not None
        assert display_port_1 != display_port_2
    finally:
        for xvfb in (xvfb_1, xvfb_2):
            if xvfb is not None:
                xvfb.terminate()
                await xvfb.wait()
@@ -0,0 +1,135 @@
1
+ # Copyright 2020-2024 The MathWorks, Inc.
2
+
3
+ import asyncio
4
+ import pytest
5
+ import psutil
6
+
7
+ from matlab_proxy.util import get_child_processes, system, add_signal_handlers, prettify
8
+ from matlab_proxy.util import system
9
+
10
+
11
def test_get_supported_termination_signals():
    """At least one OS termination signal must be reported as supported."""
    supported = system.get_supported_termination_signals()
    assert len(supported) >= 1
14
+
15
+
16
def test_add_signal_handlers(loop: asyncio.AbstractEventLoop):
    """Verify signal handlers are registered for the process.

    Args:
        loop (asyncio.AbstractEventLoop): pytest fixture supplying an event loop.
    """
    loop = add_signal_handlers(loop)

    if not system.is_posix():
        import signal

        # On Windows the handlers are registered via the 'signal' package.
        for interrupt_signal in system.get_supported_termination_signals():
            assert signal.getsignal(interrupt_signal) is not None
    else:
        # On posix systems the event loop itself tracks the new handlers.
        assert loop._signal_handlers is not None
        assert loop._signal_handlers.items() is not None
36
+
37
+
38
def test_prettify():
    """Verify prettify wraps the supplied text with the boundary filler."""
    messages = ["Hello world"]

    decorated = prettify(boundary_filler="=", text_arr=messages)

    assert messages[0] in decorated
    assert "=" in decorated
46
+
47
+
48
def test_get_child_processes_no_children_initially(mocker):
    """Verify get_child_processes keeps polling until children appear.

    psutil.Process is patched so that .children() only returns processes after
    a short delay, simulating a child that has not spawned yet.
    """
    import time

    # Canned psutil parent and children
    parent_psutil = mocker.MagicMock(spec=psutil.Process)
    expected_children = [mocker.MagicMock(spec=psutil.Process) for _ in range(2)]

    mocker.patch("psutil.Process", return_value=parent_psutil)
    parent_psutil.is_running.return_value = True

    def delayed_children(*args, **kwargs):
        # Simulate the children only becoming visible after a delay.
        time.sleep(0.4)
        return expected_children

    parent_psutil.children.side_effect = delayed_children

    # Fake asyncio subprocess handle with a dummy pid
    parent_process = mocker.MagicMock(spec=asyncio.subprocess.Process)
    parent_process.pid = 12345

    result = get_child_processes(parent_process)

    assert result == expected_children
    parent_psutil.children.assert_called_with(recursive=False)
79
+
80
+
81
def test_get_child_processes_no_children(mocker):
    """Verify get_child_processes raises RuntimeError when no children ever appear."""
    # Fake asyncio subprocess handle with a dummy pid
    parent_process = mocker.MagicMock(spec=asyncio.subprocess.Process)
    parent_process.pid = 12345

    # Patch psutil so the parent is running but has an empty child list.
    parent_psutil = mocker.MagicMock(spec=psutil.Process)
    mocker.patch("psutil.Process", return_value=parent_psutil)
    parent_psutil.is_running.return_value = True
    parent_psutil.children.return_value = []

    with pytest.raises(RuntimeError):
        get_child_processes(parent_process)
95
+
96
+
97
def test_get_child_processes_with_children(mocker):
    """Verify get_child_processes returns the children reported by psutil."""
    # Canned psutil parent and single child
    parent_psutil = mocker.MagicMock(spec=psutil.Process)
    child = mocker.MagicMock(spec=psutil.Process)

    mocker.patch("psutil.Process", return_value=parent_psutil)
    parent_psutil.is_running.return_value = True
    parent_psutil.children.return_value = [child]

    # Fake asyncio subprocess handle with a dummy pid
    parent_process = mocker.MagicMock(spec=asyncio.subprocess.Process)
    parent_process.pid = 12345

    result = get_child_processes(parent_process)

    assert result == [child]
118
+
119
+
120
def test_get_child_processes_parent_not_running(mocker):
    """Verify get_child_processes asserts when the parent process is not running."""
    # Patch psutil so the parent reports as not running.
    parent_psutil = mocker.MagicMock(spec=psutil.Process)
    mocker.patch("psutil.Process", return_value=parent_psutil)
    parent_psutil.is_running.return_value = False

    # Fake asyncio subprocess handle with a dummy pid
    parent_process = mocker.MagicMock(spec=asyncio.subprocess.Process)
    parent_process.pid = 12345

    with pytest.raises(
        AssertionError,
        match="Can't check for child processes as the parent process is no longer running.",
    ):
        get_child_processes(parent_process)