mercuto-client 0.1.0 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of mercuto-client might be problematic.

@@ -0,0 +1,30 @@
+ """
+ Copyright (C) 2025 Rockfield Technologies Australia Pty Ltd
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as published
+ by the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+ """
+ from typing import Mapping, Optional
+
+ from .client import MercutoClient
+ from .exceptions import MercutoClientException, MercutoHTTPException
+
+ __all__ = ['MercutoClient', 'MercutoHTTPException', 'MercutoClientException']
+
+
+ def connect(api_key: Optional[str] = None,
+             service_token: Optional[str] = None,
+             headers: Optional[Mapping[str, str]] = None) -> MercutoClient:
+     return MercutoClient().connect(api_key=api_key,
+                                    service_token=service_token,
+                                    headers=headers)
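For orientation, a minimal usage sketch of the module-level connect() helper. The API key value is hypothetical; healthcheck() and identity().verify_me() are calls that appear in the package's own tests further down.

    import mercuto_client

    # Build and connect a client in one call.
    client = mercuto_client.connect(api_key='my-api-key')  # hypothetical key
    print(client.healthcheck())
    print(client.identity().verify_me())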
File without changes
File without changes
File without changes
@@ -0,0 +1,210 @@
+ import os
+ import sqlite3
+ import tempfile
+ import time
+ from typing import Generator, Tuple
+
+ import pytest
+
+ from ...ingester.processor import FileProcessor
+
+
+ def mock_process_callback(filepath: str) -> bool:
+     return "success" in filepath  # Simulate success if the filename contains "success"
+
+
+ @pytest.fixture
+ def temp_env() -> Generator[Tuple[FileProcessor, str, str], None, None]:
+     """Set up a temporary directory and database"""
+     buffer_dir: str = tempfile.mkdtemp()
+     workdir = tempfile.mkdtemp()
+     db_path: str = os.path.join(workdir, "test_buffer.db")
+
+     processor: FileProcessor = FileProcessor(
+         buffer_dir=buffer_dir,
+         db_path=db_path,
+         max_files=3,
+         max_attempts=2,
+         process_callback=mock_process_callback
+     )
+
+     yield processor, buffer_dir, db_path
+
+     # Cleanup
+     if os.path.exists(db_path):
+         os.remove(db_path)
+
+
+ def test_init_db(temp_env: Tuple[FileProcessor, str, str]) -> None:
+     """Verify database initialization"""
+     processor, _, db_path = temp_env
+     conn: sqlite3.Connection = sqlite3.connect(db_path)
+     cursor: sqlite3.Cursor = conn.cursor()
+     cursor.execute(
+         "SELECT name FROM sqlite_master WHERE type='table' AND name='file_buffer'")
+     assert cursor.fetchone() is not None  # Table should exist
+     conn.close()
+
+
+ def test_register_file(temp_env: Tuple[FileProcessor, str, str]) -> None:
+     """Check that new files are correctly registered in the database"""
+     processor, buffer_dir, _ = temp_env
+     test_file: str = os.path.join(buffer_dir, "file1.txt")
+
+     # Create a test file
+     with open(test_file, "w") as f:
+         f.write("Test content")
+
+     processor.add_file_to_db(test_file)
+
+     conn: sqlite3.Connection = sqlite3.connect(processor.get_db_path())
+     cursor: sqlite3.Cursor = conn.cursor()
+     cursor.execute(
+         "SELECT filename FROM file_buffer WHERE filename = 'file1.txt'")
+     assert cursor.fetchone() is not None  # File should be registered
+     conn.close()
+
+
+ def test_file_processing(temp_env: Tuple[FileProcessor, str, str]) -> None:
+     """Verify file processing and persistence"""
+     processor, buffer_dir, _ = temp_env
+     test_file: str = os.path.join(buffer_dir, "success_file.txt")
+
+     with open(test_file, "w") as f:
+         f.write("Test content")
+
+     processor.add_file_to_db(test_file)
+     processor.process_next_file()
+
+     conn: sqlite3.Connection = sqlite3.connect(processor.get_db_path())
+     cursor: sqlite3.Cursor = conn.cursor()
+     cursor.execute(
+         "SELECT status FROM file_buffer WHERE filename = 'success_file.txt'")
+     # File should be marked as processed
+     assert cursor.fetchone()[0] == "processed"
+     conn.close()
+
+
+ def test_file_processing_in_order(temp_env: Tuple[FileProcessor, str, str]) -> None:
+     """Verify files are processed in database-insertion order"""
+     processor, buffer_dir, _ = temp_env
+
+     # Create the test files in reverse name order so the order on the filesystem differs from the insertion order in the database
+     for i in reversed(range(5)):
+         test_file: str = os.path.join(buffer_dir, f"success_file_{i}.txt")
+         with open(test_file, "w") as f:
+             f.write("Test content")
+
+         processor.add_file_to_db(test_file)
+
+     for i in reversed(range(5)):
+         processed = processor.process_next_file()
+         assert processed is not None, "Should process a file"
+         name = os.path.basename(processed)
+         assert name == f'success_file_{i}.txt', f"Should process files in order, got {name}"
+
+
+ def test_retry_attempts(temp_env: Tuple[FileProcessor, str, str]) -> None:
+     """Ensure failed files are retried up to max_attempts"""
+     processor, buffer_dir, _ = temp_env
+     test_file: str = os.path.join(buffer_dir, "fail_file.txt")
+
+     with open(test_file, "w") as f:
+         f.write("Test content")
+
+     processor.add_file_to_db(test_file)
+
+     for _ in range(2):
+         processor.process_next_file()
+
+     conn: sqlite3.Connection = sqlite3.connect(processor.get_db_path())
+     cursor: sqlite3.Cursor = conn.cursor()
+     cursor.execute(
+         "SELECT attempts FROM file_buffer WHERE filename = 'fail_file.txt'")
+     # Should match max_attempts
+     assert cursor.fetchone()[0] == 2
+     conn.close()
+
+
+ @pytest.mark.parametrize("filename_part", ["success", "unprocessed", "failed"])
+ def test_cleanup_old_files_different_processing_status(temp_env: Tuple[FileProcessor, str, str], filename_part: str) -> None:
+     """Ensure cleanup keeps only max_files, no matter the file status"""
+     processor, buffer_dir, _ = temp_env
+
+     # Create more files than the max_files limit
+     for i in range(5):
+         test_file: str = os.path.join(buffer_dir, f"{filename_part}{i}.txt")
+         with open(test_file, "w") as f:
+             f.write("Test content")
+         processor.add_file_to_db(test_file)
+         if filename_part != 'unprocessed':
+             # Process every file that is not meant to stay unprocessed
+             processor.process_next_file()
+
+     processor.cleanup_old_files()
+
+     conn: sqlite3.Connection = sqlite3.connect(processor.get_db_path())
+     cursor: sqlite3.Cursor = conn.cursor()
+     cursor.execute(
+         "SELECT COUNT(*) FROM file_buffer")
+     # Should retain only max_files
+     assert cursor.fetchone()[0] == 3
+
+     # The last files (indices 2, 3 and 4) should remain
+     cursor.execute(
+         "SELECT filename FROM file_buffer ORDER BY timestamp ASC")
+     remaining_files: list[tuple[str]] = cursor.fetchall()
+     expected_files = [f"{filename_part}2.txt", f"{filename_part}3.txt", f"{filename_part}4.txt"]
+     assert [f[0] for f in remaining_files] == expected_files
+
+     conn.close()
+
+
+ def test_scan_existing_files(temp_env: Tuple[FileProcessor, str, str]) -> None:
+     """Ensure that new files added are detected"""
+     processor, buffer_dir, _ = temp_env
+
+     for i in range(5):
+         test_file = os.path.join(buffer_dir, f"file{i}.txt")
+         with open(test_file, "w") as f:
+             f.write("Test content")
+         time.sleep(0.05)
+
+     processor.scan_existing_files()
+     conn: sqlite3.Connection = sqlite3.connect(processor.get_db_path())
+     cursor: sqlite3.Cursor = conn.cursor()
+     cursor.execute(
+         "SELECT filename FROM file_buffer ORDER BY timestamp ASC")
+
+     files: list[tuple[str]] = cursor.fetchall()
+     conn.close()
+     assert files == [('file0.txt',), ('file1.txt',), ('file2.txt',), ('file3.txt',), ('file4.txt',)]
+
+
+ def test_scan_existing_files_that_havent_been_processed(temp_env: Tuple[FileProcessor, str, str]) -> None:
+     """Ensure that a scan picks up files that were never added or processed"""
+     processor, buffer_dir, _ = temp_env
+
+     for i in range(5):
+         test_file = os.path.join(buffer_dir, f"file{i}.txt")
+         with open(test_file, "w") as f:
+             f.write("Test content")
+         time.sleep(0.05)
+
+         if i < 3:
+             # Simulate adding for the first 3 files
+             processor.add_file_to_db(test_file)
+         if i < 2:
+             # Simulate processing for the first 2 files
+             processor.process_next_file()
+
+     time.sleep(0.05)
+     processor.scan_existing_files()
+     conn: sqlite3.Connection = sqlite3.connect(processor.get_db_path())
+     cursor: sqlite3.Cursor = conn.cursor()
+     cursor.execute(
+         "SELECT filename FROM file_buffer ORDER BY timestamp ASC")
+
+     files: list[tuple[str]] = cursor.fetchall()
+     conn.close()
+     assert files == [('file0.txt',), ('file1.txt',), ('file2.txt',), ('file3.txt',), ('file4.txt',)]
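Taken together, these tests pin down a small durable-queue workflow. Here is a sketch of how FileProcessor might be driven outside of tests, using only the constructor and methods exercised above; the absolute module path, the directory paths, and the assumption that process_next_file() returns None once nothing is pending are mine, not the package's documentation.

    from mercuto_client.ingester.processor import FileProcessor  # path assumed from the relative test imports

    def handle(filepath: str) -> bool:
        # Return True to mark the file processed, False to leave it for retry,
        # mirroring mock_process_callback in the tests above.
        print("processing", filepath)
        return True

    processor = FileProcessor(buffer_dir="/var/spool/ingest",    # hypothetical directory
                              db_path="/var/spool/ingest.db",    # hypothetical path
                              max_files=100, max_attempts=3,
                              process_callback=handle)
    processor.scan_existing_files()       # register files already on disk
    while processor.process_next_file():  # oldest-first, per test_file_processing_in_order
        pass
    processor.cleanup_old_files()         # retain only the newest max_files entries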
@@ -0,0 +1,37 @@
+ import ftplib
+ import io
+ import os
+ import tempfile
+ from datetime import datetime, timezone
+
+ from ...ingester.ftp import simple_ftp_server
+
+
+ def test_simple_ftp_server():
+     receives = []
+     def clock(): return datetime(2023, 10, 1, 12, 0, 0, tzinfo=timezone.utc)
+
+     with tempfile.TemporaryDirectory() as temp_dir:
+         with simple_ftp_server(directory=temp_dir,
+                                username='test', password='password', port=2121,
+                                callback=lambda dest: receives.append(dest), clock=clock):
+             client = ftplib.FTP()
+             client.connect('localhost', 2121)
+             client.login('test', 'password')
+
+             # Upload a test file
+             testbuf = io.BytesIO(b'This is a test file.')
+             client.storbinary('STOR test_file.txt', testbuf)
+
+             assert len(receives) == 1
+             assert receives[0].endswith('test_file_20231001T120000.txt')
+
+             # Verify the file was uploaded correctly
+             with open(receives[0], 'rb') as f:
+                 content = f.read()
+                 assert content == b'This is a test file.'
+
+             # Ensure that the file exists in the temp directory
+             found = os.listdir(temp_dir)
+             assert 'test_file_20231001T120000.txt' in found
+             assert len(found) == 1  # Only one file should be present
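The context manager also lends itself to standalone use. A minimal sketch, assuming the absolute import path and assuming the clock argument is optional; only the keyword arguments shown in the test above are attested.

    from mercuto_client.ingester.ftp import simple_ftp_server  # path assumed

    # Receive files into a drop directory; per the test above, each completed
    # upload gets a UTC timestamp suffix and is handed to the callback.
    with simple_ftp_server(directory='/tmp/ftp-in', username='logger',
                           password='secret', port=2121,
                           callback=lambda dest: print('received', dest)):
        input('FTP server listening on port 2121; press Enter to stop.')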
@@ -0,0 +1,145 @@
+ import math
+ import os
+ import tempfile
+
+ import pytest
+
+ from ...ingester.parsers import (detect_parser, parse_campbell_file,
+                                  parse_worldsensing_compact_file,
+                                  parse_worldsensing_standard_file)
+
+ RESOURCES_DIR = os.path.join(os.path.dirname(__file__), "resources")
+
+
+ def test_worldsensing_compacted_parser():
+     file = os.path.join(RESOURCES_DIR, "worldsensing-compacted-sample-file.dat")
+     mapper = {
+         "channel1": "12345678",
+         "channel2": "abcdefgh",
+     }
+     samples = parse_worldsensing_compact_file(file, mapper)
+     assert len(samples) == 4
+     assert samples[0]['channel_code'] == "12345678"
+     assert math.isclose(samples[0]['value'], -10)
+     assert samples[0]['timestamp'] == '2025-05-20T15:00:00'
+
+     assert samples[1]['channel_code'] == "abcdefgh"
+     assert math.isclose(samples[1]['value'], 5)
+     assert samples[1]['timestamp'] == '2025-05-20T15:00:00'
+
+     assert samples[2]['channel_code'] == "12345678"
+     assert math.isclose(samples[2]['value'], -12)
+     assert samples[2]['timestamp'] == '2025-05-20T16:00:00'
+
+     assert samples[3]['channel_code'] == "abcdefgh"
+     assert math.isclose(samples[3]['value'], 10)
+     assert samples[3]['timestamp'] == '2025-05-20T16:00:00'
+
+
+ def test_worldsensing_standard_parser():
+     file = os.path.join(RESOURCES_DIR, "worldsensing-standard-sample-file.csv")
+     mapper = {
+         "AtmPressure-85544-in-mbar": "12345678",
+         "freqSqInDigit-85544-VW-Ch1": "abcdefgh",
+     }
+     samples = parse_worldsensing_standard_file(file, mapper)
+     assert len(samples) == 10
+     assert samples[0]['channel_code'] == "12345678"
+     assert math.isclose(samples[0]['value'], 930.5)
+     assert samples[0]['timestamp'] == '2024-04-15T12:35:00'
+
+     assert samples[1]['channel_code'] == "abcdefgh"
+     assert math.isclose(samples[1]['value'], 726.810811024)
+     assert samples[1]['timestamp'] == '2024-04-15T12:35:00'
+
+     assert samples[8]['channel_code'] == "12345678"
+     assert math.isclose(samples[8]['value'], 930.4)
+     assert samples[8]['timestamp'] == '2024-04-15T12:39:00'
+
+     assert samples[9]['channel_code'] == "abcdefgh"
+     assert math.isclose(samples[9]['value'], 726.841502500)
+     assert samples[9]['timestamp'] == '2024-04-15T12:39:00'
+
+
+ def test_campbells_parser():
+     file = os.path.join(RESOURCES_DIR, "campbell-sample-file.dat")
+     mapper = {
+         "VWu_1": "aaaaaaaa",
+         "VWu_2": "bbbbbbbb",
+         "Therm(1)": "cccccccc",
+         "Therm(2)": "dddddddd",
+         "Diag_Max(1)": "eeeeeeee",
+         "Diag_Max(2)": "ffffffff",
+     }
+     samples = parse_campbell_file(file, mapper)
+     assert len(samples) == 6 * 4  # 6 channels x 4 rows
+
+     assert samples[0]['channel_code'] == "aaaaaaaa"
+     assert math.isclose(samples[0]['value'], 1234.5)
+     assert samples[0]['timestamp'] == '2023-12-07T00:01:00'
+
+     for i in range(1, 6):
+         assert samples[i]['channel_code'] == list(mapper.values())[i]
+         assert math.isnan(samples[i]['value'])
+         assert samples[i]['timestamp'] == '2023-12-07T00:01:00'
+
+     assert samples[6]['channel_code'] == "aaaaaaaa"
+     assert math.isclose(samples[6]['value'], 1234.5)
+     assert samples[6]['timestamp'] == '2023-12-07T00:02:00'
+     for i in range(7, 12):
+         assert samples[i]['channel_code'] == list(mapper.values())[i - 6]
+         assert math.isnan(samples[i]['value'])
+         assert samples[i]['timestamp'] == '2023-12-07T00:02:00'
+
+     assert samples[12]['channel_code'] == "aaaaaaaa"
+     assert math.isclose(samples[12]['value'], 1234.5)
+     assert samples[12]['timestamp'] == '2023-12-07T00:03:00'
+     assert samples[13]['channel_code'] == "bbbbbbbb"
+     assert math.isclose(samples[13]['value'], 1234.5)
+     assert samples[13]['timestamp'] == '2023-12-07T00:03:00'
+
+     for i in range(15, 17):
+         assert samples[i]['channel_code'] == list(mapper.values())[i - 12]
+         assert math.isnan(samples[i]['value'])
+         assert samples[i]['timestamp'] == '2023-12-07T00:03:00'
+     assert samples[17]['channel_code'] == "ffffffff"
+     assert math.isclose(samples[17]['value'], 1537)
+     assert samples[17]['timestamp'] == '2023-12-07T00:03:00'
+
+     assert samples[18]['channel_code'] == "aaaaaaaa"
+     assert math.isclose(samples[18]['value'], 1234.5)
+     assert samples[18]['timestamp'] == '2023-12-07T00:04:00'
+     assert samples[19]['channel_code'] == "bbbbbbbb"
+     assert math.isclose(samples[19]['value'], 1234.5)
+     assert samples[19]['timestamp'] == '2023-12-07T00:04:00'
+     assert samples[20]['channel_code'] == "cccccccc"
+     assert math.isclose(samples[20]['value'], 27.5)
+     assert samples[20]['timestamp'] == '2023-12-07T00:04:00'
+     assert samples[21]['channel_code'] == "dddddddd"
+     assert math.isclose(samples[21]['value'], 25)
+     assert samples[21]['timestamp'] == '2023-12-07T00:04:00'
+     assert samples[22]['channel_code'] == "eeeeeeee"
+     assert math.isclose(samples[22]['value'], 255)
+     assert samples[22]['timestamp'] == '2023-12-07T00:04:00'
+     assert samples[23]['channel_code'] == "ffffffff"
+     assert math.isclose(samples[23]['value'], 0)
+     assert samples[23]['timestamp'] == '2023-12-07T00:04:00'
+
+
+ def test_detect_file_type():
+     compacted_file = os.path.join(RESOURCES_DIR, "worldsensing-compacted-sample-file.dat")
+     standard_file = os.path.join(RESOURCES_DIR, "worldsensing-standard-sample-file.csv")
+     campbell_file = os.path.join(RESOURCES_DIR, "campbell-sample-file.dat")
+
+     assert detect_parser(compacted_file) == parse_worldsensing_compact_file
+     assert detect_parser(standard_file) == parse_worldsensing_standard_file
+     assert detect_parser(campbell_file) == parse_campbell_file
+
+     # Test with an unknown file type
+     with tempfile.TemporaryDirectory() as dir:
+         unknown_file = os.path.join(dir, "unknown-file.txt")
+         with open(unknown_file, "w") as f:
+             f.write("This is an unknown file format.")
+
+         with pytest.raises(ValueError):
+             detect_parser(unknown_file)
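Read together, these tests imply a two-step ingest flow: detect the file format, then parse with a channel mapper. A sketch under those assumptions; the absolute module path and the file name are hypothetical, while the sample dict keys are the ones asserted above.

    from mercuto_client.ingester.parsers import detect_parser  # path assumed

    mapper = {"AtmPressure-85544-in-mbar": "12345678"}  # source channel name -> channel code

    path = "/data/logger-export.csv"  # hypothetical file
    parser = detect_parser(path)      # raises ValueError for unrecognised formats
    for sample in parser(path, mapper):
        print(sample['channel_code'], sample['timestamp'], sample['value'])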
@@ -0,0 +1,93 @@
+ import pytest
+
+ from .. import MercutoClient
+ from ..mocks import mock_client
+
+
+ def test_mock_injection_before_client_creation():
+     count = 0
+
+     def on_get_healthcheck(*args, **kwargs):
+         nonlocal count
+         count += 1
+         return 'mocked'
+     with mock_client() as mock:
+         mock.on('GET', '/healthcheck', on_get_healthcheck)
+         client = MercutoClient()
+         assert client.healthcheck() == 'mocked'
+         assert count == 1
+
+
+ def test_mock_injection_after_client_creation():
+     count = 0
+     client = MercutoClient()
+
+     def on_get_healthcheck(*args, **kwargs):
+         nonlocal count
+         count += 1
+         return 'mocked'
+     with mock_client() as mock:
+         mock.on('GET', '/healthcheck', on_get_healthcheck)
+         assert client.healthcheck() == 'mocked'
+         assert count == 1
+
+
+ def test_mock_releases_after_end_of_context():
+     client = MercutoClient()
+     with mock_client() as mock:
+         key = mock.add_user(user='this is a test')
+         client.connect(api_key=key)
+         assert client.identity().verify_me()['user'] == 'this is a test'
+
+     with pytest.raises(Exception):
+         client.identity().verify_me()
+
+
+ def test_mock_verify_me():
+     client = MercutoClient()
+     with mock_client() as mock:
+         with pytest.raises(Exception):
+             client.identity().verify_me()
+
+         client.connect(api_key='bad api key')
+         with pytest.raises(Exception):
+             client.identity().verify_me()
+
+         key = mock.add_user(user='this is a test user',
+                             tenant='test-tenant', permission_group='test-group')
+         client.connect(api_key=key)
+         assert client.identity().verify_me()['user'] == 'this is a test user'
+         assert client.identity().verify_me()['tenant'] == 'test-tenant'
+         assert client.identity().verify_me()[
+             'permission_group'] == 'test-group'
+
+         mock.delete_user(key)
+         with pytest.raises(Exception):
+             client.identity().verify_me()
+
+
+ def test_mock_get_user():
+     client = MercutoClient()
+     with mock_client() as mock:
+         client.connect(api_key='bad api key')
+         with pytest.raises(Exception):
+             client.identity().get_user('12345')
+
+         key = mock.add_user(user='code1', tenant='test-tenant', permission_group='test-group',
+                             username='testing@example.com')
+         client.connect(api_key=key)
+
+         assert client.identity().get_user('code1')['code'] == 'code1'
+         assert client.identity().get_user(
+             'code1')['username'] == 'testing@example.com'
+
+         mock.delete_user(key)
+         with pytest.raises(Exception):
+             client.identity().verify_me()
+
+
+ def test_mock_unsupported_endpoint():
+     client = MercutoClient()
+     with mock_client():
+         with pytest.raises(NotImplementedError):
+             client.identity().list_tenants()
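For test suites built on this client, the same machinery wraps naturally into a pytest fixture. A minimal sketch using only the calls exercised above; the fixture name, the user string, and the absolute import paths are mine.

    import pytest

    from mercuto_client import MercutoClient
    from mercuto_client.mocks import mock_client  # absolute path assumed from the relative imports

    @pytest.fixture
    def mocked_client():
        # Everything inside the context hits the mock, never the real service.
        with mock_client() as mock:
            client = MercutoClient()
            client.connect(api_key=mock.add_user(user='fixture-user'))
            yield client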
@@ -0,0 +1,13 @@
+ from datetime import timedelta
+
+
+ def timedelta_isoformat(td: timedelta) -> str:
+     """
+     ISO 8601 encoding for a Python timedelta object.
+     Taken from the pydantic source:
+     https://github.com/pydantic/pydantic/blob/3704eccce4661455acdda1cdcf716bd4b3382e08/pydantic/deprecated/json.py#L135-L140
+
+     """
+     minutes, seconds = divmod(td.seconds, 60)
+     hours, minutes = divmod(minutes, 60)
+     return f'{"-" if td.days < 0 else ""}P{abs(td.days)}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S'
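A quick worked example of the encoding: one day, two hours, three minutes and four seconds comes out as a duration with days separated from the time part by T.

    from datetime import timedelta

    print(timedelta_isoformat(timedelta(days=1, hours=2, minutes=3, seconds=4)))
    # -> P1DT2H3M4.000000S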
mercuto_client/acl.py ADDED
@@ -0,0 +1,101 @@
+ import json
+ from typing import TypeVar
+
+ from .types import AccessControlListJson, AccessControlListJsonEntry
+
+
+ class ResourceTypes:
+     WILDCARD = '*'
+
+     class Mercuto:
+         """
+         Resource types available for ServiceTypes.MERCUTO
+         """
+         PROJECT = 'project'
+         SYSTEM = 'system'
+         WILDCARD = '*'
+
+     class Identity:
+         """
+         Resource types available for ServiceTypes.IDENTITY
+         """
+         TENANT = 'tenant'
+         WILDCARD = '*'
+
+
+ class AllowedActions:
+     WILDCARD = '*'
+
+     class Mercuto:
+         """
+         Actions available for ServiceTypes.MERCUTO
+         """
+         WILDCARD = '*'
+         VIEW_PROJECT = 'MERCUTO:VIEW_PROJECT'
+         MANAGE_PROJECT = 'MERCUTO:MANAGE_PROJECT'
+         EDIT_PROJECT = 'MERCUTO:EDIT_PROJECT'
+         UPLOAD_DATA = 'MERCUTO:UPLOAD_DATA'
+         EDIT_SYSTEM = 'MERCUTO:EDIT_SYSTEM'
+
+     class Identity:
+         """
+         Actions available for ServiceTypes.IDENTITY
+         """
+         WILDCARD = '*'
+         VIEW_TENANT = 'IDENTITY:VIEW_TENANT'
+         MANAGE_TENANT = 'IDENTITY:MANAGE_TENANT'
+         EDIT_TENANT = 'IDENTITY:EDIT_TENANT'
+         VIEW_USER_DETAILED_INFO = 'IDENTITY:VIEW_USER_DETAILED_INFO'
+         CREATE_NEW_TENANTS = 'IDENTITY:CREATE_NEW_TENANTS'
+
+
+ class ServiceTypes:
+     class Mercuto:
+         Name = 'mercuto'
+         ResourceTypes = ResourceTypes.Mercuto
+         AllowedActions = AllowedActions.Mercuto
+
+     class Identity:
+         Name = 'identity'
+         ResourceTypes = ResourceTypes.Identity
+         AllowedActions = AllowedActions.Identity
+
+     IDENTITY = Identity.Name
+     MERCUTO = Mercuto.Name
+     WILDCARD = '*'
+
+
+ T = TypeVar('T', bound='AclPolicyBuilder')
+
+
+ class AclPolicyBuilder:
+     def __init__(self) -> None:
+         self._permissions: list[AccessControlListJsonEntry] = []
+
+     def allow(self: T, action: str, resource: str) -> T:
+         self._permissions.append({
+             'action': action,
+             'resource': resource
+         })
+         return self
+
+     def allow_all(self: T, action: str) -> T:
+         self.allow(action, f"mrn:{ServiceTypes.WILDCARD}:{ResourceTypes.WILDCARD}/{ResourceTypes.WILDCARD}")
+         return self
+
+     def allow_project(self: T, action: str, project_code: str) -> T:
+         self.allow(action, f"mrn:{ServiceTypes.MERCUTO}:{ResourceTypes.Mercuto.PROJECT}/{project_code}")
+         return self
+
+     def allow_tenant(self: T, action: str, tenant_code: str) -> T:
+         self.allow(action, f"mrn:{ServiceTypes.IDENTITY}:{ResourceTypes.Identity.TENANT}/{tenant_code}")
+         return self
+
+     def as_string(self) -> str:
+         return json.dumps(self.as_dict())
+
+     def as_dict(self) -> AccessControlListJson:
+         return {
+             'version': 1,
+             'permissions': self._permissions
+         }