mercuto-client 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mercuto-client might be problematic. Click here for more details.
- mercuto_client/__init__.py +30 -0
- mercuto_client/_tests/__init__.py +0 -0
- mercuto_client/_tests/conftest.py +0 -0
- mercuto_client/_tests/test_ingester/__init__.py +0 -0
- mercuto_client/_tests/test_ingester/test_file_processor.py +210 -0
- mercuto_client/_tests/test_ingester/test_ftp.py +37 -0
- mercuto_client/_tests/test_ingester/test_parsers.py +145 -0
- mercuto_client/_tests/test_mocking.py +93 -0
- mercuto_client/_util.py +13 -0
- mercuto_client/acl.py +101 -0
- mercuto_client/client.py +903 -0
- mercuto_client/exceptions.py +15 -0
- mercuto_client/ingester/__init__.py +0 -0
- mercuto_client/ingester/__main__.py +287 -0
- mercuto_client/ingester/ftp.py +115 -0
- mercuto_client/ingester/parsers/__init__.py +42 -0
- mercuto_client/ingester/parsers/campbell.py +12 -0
- mercuto_client/ingester/parsers/generic_csv.py +114 -0
- mercuto_client/ingester/parsers/worldsensing.py +23 -0
- mercuto_client/ingester/processor.py +291 -0
- mercuto_client/ingester/util.py +64 -0
- mercuto_client/mocks.py +203 -0
- mercuto_client/py.typed +0 -0
- mercuto_client/types.py +409 -0
- mercuto_client-0.1.0.dist-info/METADATA +16 -0
- mercuto_client-0.1.0.dist-info/RECORD +29 -0
- mercuto_client-0.1.0.dist-info/WHEEL +5 -0
- mercuto_client-0.1.0.dist-info/licenses/LICENSE +619 -0
- mercuto_client-0.1.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,291 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import os
|
|
3
|
+
import shutil
|
|
4
|
+
import sqlite3
|
|
5
|
+
from datetime import datetime, timezone
|
|
6
|
+
from typing import Callable, Optional
|
|
7
|
+
|
|
8
|
+
logger = logging.getLogger(__name__)
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class FileProcessor:
    """
    System for processing files in a strict order with retry logic.

    Keeps an SQLite database to track files and their processing status.
    Keeps old files in the buffer and only deletes them once max_files is reached (whether processed or not).

    :param buffer_dir: Directory where files are stored
    :param db_path: Path to the SQLite database file for tracking file processing status.
    :param process_callback: Callable that processes a file. Should return True if processing is successful
    :param max_attempts: Maximum number of attempts to process a file before marking it as failed.
    :param max_files: Maximum number of files to keep in the buffer directory. If None, no limit is enforced.
    :param free_space_mb: Optional minimum free space in MB to keep on the partition where the buffer directory is located.
        This is combined with max_files to determine when to delete old files and takes precedence over max_files.
    :param clock: Optional callable that returns the timestamp for the file based on the filename. Takes in the file name as an argument
        and should return a float representing the timestamp in seconds since the epoch. Defaults to datetime.now().timestamp().
        This clock is NOT used when scanning existing files, it takes its own clock function.

    Provides a callback for processing files, which should return True if successful.
    If processing fails, it retries up to max_attempts times before marking the file as failed.

    Periodically call `cleanup_old_files()` to remove old files from the buffer directory.
    Use `scan_existing_files()` to register files that were added while the system was offline.
    Use `process_next_file()` to process the next file in the buffer in strict order.
    Add files to the buffer using `add_file_to_db()`.

    Example usage:
    ```python
    processor = FileProcessor(...)
    processor.scan_existing_files()

    while True:
        processor.add_file_to_db("path/to/file.txt")
        processor.process_next_file()
    ```
    """

    def __init__(self, buffer_dir: str, db_path: str,
                 process_callback: Callable[[str], bool],
                 max_attempts: int,
                 max_files: Optional[int] = None,
                 free_space_mb: Optional[float] = None,
                 clock: Optional[Callable[[str], float]] = None
                 ) -> None:
        self._buffer_dir = buffer_dir
        self._db_path = db_path
        self._max_files = max_files
        self._max_attempts = max_attempts
        self._process_callback = process_callback
        self._free_space_mb = free_space_mb
        if clock is None:
            # Default clock: current UTC time in seconds since the epoch,
            # ignoring the filename argument.
            def clock(_: str) -> float:
                return datetime.now(timezone.utc).timestamp()
        self._clock = clock

        os.makedirs(self._buffer_dir, exist_ok=True)
        self._init_db()

    def get_db_path(self) -> str:
        """Returns the path to the SQLite database."""
        return self._db_path

    def get_buffer_dir(self) -> str:
        """Returns the path to the buffer directory."""
        return self._buffer_dir

    def _execute(self, sql: str, params: tuple = ()) -> list:
        """Run one statement against the tracking DB, commit, and return all rows.

        Opens a short-lived connection per call and always closes it, even when
        the statement raises (the original code leaked connections on error).
        """
        conn = sqlite3.connect(self._db_path)
        try:
            cursor = conn.cursor()
            cursor.execute(sql, params)
            rows = cursor.fetchall()
            conn.commit()
            return rows
        finally:
            conn.close()

    def _init_db(self) -> None:
        """Initialize SQLite database with attempt tracking."""
        self._execute("""
            CREATE TABLE IF NOT EXISTS file_buffer (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                filename TEXT UNIQUE,
                filepath TEXT UNIQUE,
                status TEXT DEFAULT 'pending',
                attempts INTEGER DEFAULT 0,
                timestamp REAL
            )
        """)
        logger.info("Database initialized.")

    def scan_existing_files(self, clock: Optional[Callable[[str], float]] = None) -> None:
        """
        Detect files added while offline and register them for processing.

        :param clock: Optional callable that returns the timestamp for the file based on the filename.
            If not provided, uses the file's creation time.
            This may be inaccurate if multiple files are added at once, or if the file system does not support accurate timestamps.
        """
        if clock is None:
            # Default: order existing files by filesystem creation time.
            def clock(filepath: str) -> float:
                return os.path.getctime(filepath)

        files_with_timestamps = [
            (filename, clock(os.path.join(self._buffer_dir, filename)))
            for filename in os.listdir(self._buffer_dir)
            if os.path.isfile(os.path.join(self._buffer_dir, filename))
        ]

        # Register unknown files oldest-first so strict ordering is preserved.
        # (The timestamp is computed once above; the original recomputed it
        # redundantly inside the loop.)
        for filename, timestamp in sorted(files_with_timestamps, key=lambda item: item[1]):
            filepath = os.path.join(self._buffer_dir, filename)
            known: int = self._execute(
                "SELECT COUNT(*) FROM file_buffer WHERE filename = ?", (filename,))[0][0]
            if not known:
                # BUG FIX: the original f-string had no placeholder and logged
                # the literal text "(unknown)"; include the actual filename.
                logger.info(f"Registering existing file {filename} for processing...")
                self._execute(
                    "INSERT INTO file_buffer (filename, filepath, status, attempts, timestamp) VALUES (?, ?, 'pending', 0, ?)",
                    (filename, filepath, timestamp))

    def process_next_file(self) -> Optional[str]:
        """
        Attempt to process the next file in the sequence (if exists), ensuring strict order.
        Returns the filepath of the handled file if it was processed (or permanently
        given up on), or None if no pending files are found or the attempt failed
        and will be retried.
        """
        rows = self._execute(
            "SELECT filepath, attempts FROM file_buffer WHERE status = 'pending' ORDER BY timestamp ASC LIMIT 1")
        if not rows:
            return None
        filepath, attempts = rows[0]
        if self._process_file(filepath, attempts):
            return filepath
        return None

    def _process_file(self, filepath: str, attempts: int) -> bool:
        """Run the callback for one file.

        Returns True when the file is finished (processed, or permanently
        failed) and False when it should be retried later.
        """
        if not os.path.exists(filepath):
            logger.warning(f"File {filepath} does not exist. Skipping.")
            self._mark_as_failed(filepath)
            # BUG FIX: the original fell through and still invoked the
            # callback on a path that is known to be missing.
            return False

        try:
            success: bool = self._process_callback(filepath)
        except Exception as e:
            logger.error(f"Processing error for {filepath}: {e}")
            success = False

        if success:
            self._mark_as_processed(filepath)
            return True

        if attempts >= self._max_attempts:
            logger.warning(
                f"Max retries reached for {filepath}. Moving to next file.")
            self._mark_as_failed(filepath)
            return True  # Give up and move to next file
        self._increment_attempt(filepath)
        return False

    def _increment_attempt(self, filepath: str) -> None:
        """Record one more failed processing attempt for the file."""
        self._execute(
            "UPDATE file_buffer SET attempts = attempts + 1 WHERE filepath = ?", (filepath,))

    def _mark_as_failed(self, filepath: str) -> None:
        """Marks a file as failed in the database."""
        self._execute(
            "UPDATE file_buffer SET status = 'failed' WHERE filepath = ?", (filepath,))
        logger.info(f"File {filepath} marked as failed.")

    def _mark_as_processed(self, filepath: str) -> None:
        """Marks a file as processed in the database."""
        self._execute(
            "UPDATE file_buffer SET status = 'processed' WHERE filepath = ?", (filepath,))
        logger.info(f"File {filepath} marked as processed.")

    def cleanup_old_files_with_max_files(self) -> None:
        """Remove old files beyond the max file count."""
        if self._max_files is None:
            return
        # Everything past the newest max_files rows; oldest rows come last
        # in this SELECT, so iterate in reverse to delete oldest-first.
        files_to_delete: list[tuple[str]] = self._execute(
            "SELECT filepath FROM file_buffer ORDER BY timestamp DESC LIMIT -1 OFFSET ?", (self._max_files,))

        for (filepath,) in reversed(files_to_delete):
            if os.path.exists(filepath):
                os.remove(filepath)
            # BUG FIX: drop the row even when the file is already gone from
            # disk; the original left stale rows behind in that case.
            self._execute(
                "DELETE FROM file_buffer WHERE filepath = ?", (filepath,))
            logger.info(f"Deleted old file {filepath}")

    def cleanup_old_files(self) -> None:
        """Remove old files based on max_files and free space."""
        if self._max_files is not None:
            self.cleanup_old_files_with_max_files()

        if self._free_space_mb is not None:
            self.cleanup_old_files_with_free_space()

    def cleanup_old_files_with_free_space(self) -> None:
        """Remove old files ensuring free space is maintained."""
        if self._free_space_mb is None:
            return

        def free_space_mb() -> float:
            """Returns the free space in MB on the partition where the buffer directory is located."""
            return shutil.disk_usage(self._buffer_dir).free / (1024 * 1024)

        while free_space_mb() < self._free_space_mb:
            if not self._delete_oldest_file():
                logger.warning("No more files to delete to free up space.")
                break

    def _delete_oldest_file(self) -> bool:
        """Delete the oldest tracked file. Returns True if a row was removed."""
        rows = self._execute(
            "SELECT filepath FROM file_buffer ORDER BY timestamp ASC LIMIT 1")
        if not rows:
            logger.info("No files to delete.")
            return False

        filepath: str = rows[0][0]
        if os.path.exists(filepath):
            os.remove(filepath)
            logger.info(f"Deleted oldest file {filepath}")
        else:
            logger.warning(f"Oldest file {filepath} does not exist.")
        # BUG FIX: remove the row even when the file is missing on disk, so a
        # stale entry cannot stall the free-space cleanup loop.
        self._execute(
            "DELETE FROM file_buffer WHERE filepath = ?", (filepath,))
        return True

    def add_file_to_db(self, filepath: str) -> None:
        """Adds a new file to the database so it becomes eligible for processing."""
        timestamp = self._clock(filepath)
        filename: str = os.path.basename(filepath)
        # INSERT OR IGNORE keeps re-registration of a known file harmless.
        self._execute("""
            INSERT OR IGNORE INTO file_buffer (filename, filepath, status, attempts, timestamp)
            VALUES (?, ?, 'pending', 0, ?)
        """, (filename, filepath, timestamp))
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import itertools
|
|
2
|
+
import shutil
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Iterable, Iterator, TypeVar
|
|
5
|
+
|
|
6
|
+
import requests
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def get_my_public_ip() -> str:
    """
    Look up the public IP address of the machine making the request.

    Delegates to the 'checkip.amazonaws.com' service.

    :return The public IP address as a string in the form 'x.x.x.x'.
    :raises
        requests.RequestException: If the request to the IP service fails.
        requests.Timeout: If the request times out.
    """
    response = requests.get('https://checkip.amazonaws.com', timeout=30)
    response.raise_for_status()
    return response.content.decode().strip()
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def get_directory_size(directory: str) -> int:
    """
    Compute the total size (in bytes) of the target directory, including all subdirectories.

    :param directory: Path to the target directory.
    :return: Total size in bytes.
    """
    total_bytes = 0
    # Walk the tree recursively; only regular files contribute to the total.
    for entry in Path(directory).rglob("*"):
        if entry.is_file():
            total_bytes += entry.stat().st_size
    return total_bytes
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def get_free_space_excluding_files(directory: str) -> int:
    """
    Return the number of free bytes on the partition of the target directory,
    excluding the total size of files in that directory.

    :param directory: Path to the target directory.
    :return: Free bytes available in the partition after subtracting file sizes.
    """
    # Free space reported by the OS for the partition holding the directory.
    usage = shutil.disk_usage(directory)

    # Subtract what the directory's own files already occupy; clamp at zero
    # so the result is never negative.
    occupied = get_directory_size(directory)
    remaining = usage.free - occupied
    return remaining if remaining > 0 else 0
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
T = TypeVar('T')
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def batched(iterable: Iterable[T], n: int) -> Iterator[tuple[T, ...]]:
    """
    Implementation of itertools.batched for < Python 3.12
    """
    source = iter(iterable)
    # Pull n items at a time; the final batch may be shorter, and an empty
    # pull means the source is exhausted.
    chunk = tuple(itertools.islice(source, n))
    while chunk:
        yield chunk
        chunk = tuple(itertools.islice(source, n))
|
mercuto_client/mocks.py
ADDED
|
@@ -0,0 +1,203 @@
|
|
|
1
|
+
import contextlib
|
|
2
|
+
import dataclasses
|
|
3
|
+
import re
|
|
4
|
+
import uuid
|
|
5
|
+
from typing import Any, Optional, Protocol
|
|
6
|
+
|
|
7
|
+
from .exceptions import MercutoClientException
|
|
8
|
+
from .types import Tenant, User, UserDetails, VerifyMeResult
|
|
9
|
+
|
|
10
|
+
"""
|
|
11
|
+
This module provides a context manager for mocking the Mercuto Client.
|
|
12
|
+
|
|
13
|
+
Within the mock_client() context, any calls to the Mercuto Client will be intercepted and handled by the MercutoMocker.
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
Usage:
|
|
17
|
+
|
|
18
|
+
client = MercutoClient()
|
|
19
|
+
|
|
20
|
+
with mock_client() as mock:
|
|
21
|
+
# Create a user in the system
|
|
22
|
+
api_key = mock.add_user()
|
|
23
|
+
|
|
24
|
+
# Login using the generated user key
|
|
25
|
+
client.connect(api_key = api_key)
|
|
26
|
+
|
|
27
|
+
# Call Mercuto Client endpoints as normal
|
|
28
|
+
client.identity().verify_me()
|
|
29
|
+
|
|
30
|
+
"""
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class MakeRequestHookType(Protocol):
    """Structural type for a hook that intercepts MercutoClient requests."""

    def __call__(self, method: str, url: str, *args, **kwargs) -> Any:
        """
        *args and **kwargs are arguments passed to requests.request(method, url, *args, **kwargs)
        """
        ...
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@contextlib.contextmanager
def mock_client():
    """
    Context manager that reroutes all MercutoClient requests to a fresh
    MercutoMocker for the duration of the block, then restores the real
    request machinery.
    """
    from .client import MercutoClient

    mocker = MercutoMocker()
    saved = MercutoClient._make_request
    try:
        MercutoClient._make_request = mocker._on_make_request
        yield mocker
    finally:
        # Always undo the patch, even if the body raised.
        MercutoClient._make_request = saved
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class MercutoMocker:
    """
    In-memory fake of the Mercuto service used by mock_client().

    Hooks are keyed by (HTTP method, url regex) and matched in registration
    order; the default hooks emulate the identity endpoints against the users
    and tenants registered via add_user() / add_tenant().
    """

    @dataclasses.dataclass
    class MockedUser:
        # Record backing the mocked identity endpoints.
        code: str
        username: str
        description: str
        tenant: str
        permission_group: str
        mobile_number: Optional[str] = None

    def __init__(self) -> None:
        # Hooks keyed by (method, url regex pattern), checked in insertion order.
        self._hooks: dict[tuple[str, str], MakeRequestHookType] = {}

        # Users based on Api-Key
        self._known_users: dict[str, MercutoMocker.MockedUser] = {}
        # Tenants keyed by tenant code.
        self._known_tenants: dict[str, Tenant] = {}

        self._setup_default_hooks()

    def on(self, method: str, path: str, callback: MakeRequestHookType):
        """Register (or replace) the hook for an HTTP method and url regex."""
        self._hooks[(method, path)] = callback

    def add_tenant(self, code: Optional[str] = None) -> None:
        """Register a tenant; a random code is generated when none is given."""
        if code is None:
            code = str(uuid.uuid4())
        self._known_tenants[code] = Tenant(
            code=code,
            name=f"Tenant {code}",
            description=f"Tenant {code}",
            logo_url=None,
        )

    def add_user(self,
                 user: Optional[str] = None,
                 tenant: Optional[str] = None,
                 permission_group: Optional[str] = None,
                 key: Optional[str] = None,
                 username: Optional[str] = None,
                 description: Optional[str] = None,
                 mobile_number: Optional[str] = None,
                 ):
        """
        Register a user and return the API key that authenticates as them.
        Any argument left as None is filled in with a generated value.
        """
        if user is None:
            # User code
            user = str(uuid.uuid4())
        if tenant is None:
            tenant = str(uuid.uuid4())
        if permission_group is None:
            permission_group = str(uuid.uuid4())
        if key is None:
            key = str(uuid.uuid4())
        if username is None:
            username = f"{user.replace(' ', '')}@example.com"
        if description is None:
            description = f"User {user}"

        mocked = self.MockedUser(user, username, description, tenant, permission_group,
                                 mobile_number=mobile_number)
        self._known_users[key] = mocked
        return key

    def delete_user(self, key: Optional[str] = None, code: Optional[str] = None):
        """Remove a user by API key and/or by user code (best effort, no error if absent)."""
        if key is not None and key in self._known_users:
            del self._known_users[key]

        if code is not None:
            key = next((k for k, u in self._known_users.items()
                        if u.code == code), None)
            if key is not None:
                del self._known_users[key]

    def _require_api_key(self, kwargs: dict) -> str:
        """Extract the X-Api-Key request header or raise, mirroring the real service."""
        key = kwargs.get('headers', {}).get('X-Api-Key', None)
        if key is None:
            raise MercutoClientException("No X-Api-Key header provided")
        return key

    def _mocked_get_tenant(self, method: str, url: str, *args, **kwargs) -> Tenant:
        self._require_api_key(kwargs)
        # BUG FIX: tenants are stored by tenant code (see add_tenant), so the
        # lookup must use the code from the URL path — the original looked the
        # tenant up by the caller's API key, which could never match a tenant
        # registered via add_tenant() unless the key equalled the code.
        tenant_code = url.split('/')[-1]
        tenant = self._known_tenants.get(tenant_code, None)
        if tenant is None:
            raise MercutoClientException("Tenant not found")
        return tenant

    def _mocked_verify_me(self, method: str, url: str, *args, **kwargs) -> VerifyMeResult:
        key = self._require_api_key(kwargs)
        user = self._known_users.get(key, None)
        if user is None:
            # BUG FIX: the original message interpolated `user`, which is
            # always None on this path ("User None not found").
            raise MercutoClientException("User not found")
        return VerifyMeResult(
            user=user.code,
            tenant=user.tenant,
            permission_group=user.permission_group,
            acl_policy='{"version": 1, "permissions": []}'
        )

    def _mocked_get_user(self, method: str, url: str, *args, **kwargs) -> User:
        apikey = kwargs.get('headers', {}).get('X-Api-Key', None)
        servicekey = kwargs.get('headers', {}).get('X-Service-Token', None)
        if apikey is None and servicekey is None:
            raise MercutoClientException(
                "No X-Api-Key or X-Service-Token header provided")
        # The user code is the final URL path segment.
        user_code = url.split('/')[-1]
        user = next((u for u in self._known_users.values()
                     if u.code == user_code), None)
        if user is None:
            raise MercutoClientException("User not found")
        return User(
            code=user.code,
            username=user.username,
            description=user.description,
            tenant=user.tenant,
            permission_group=user.permission_group,
        )

    def _mocked_get_user_details(self, method: str, url: str, *args, **kwargs) -> UserDetails:
        apikey = kwargs.get('headers', {}).get('X-Api-Key', None)
        servicekey = kwargs.get('headers', {}).get('X-Service-Token', None)
        if apikey is None and servicekey is None:
            raise MercutoClientException(
                "No X-Api-Key or X-Service-Token header provided")
        # URL shape: .../users/<code>/details
        url_parts = url.split('/')
        assert url_parts[-1] == 'details'
        user_code = url_parts[-2]
        user = next((u for u in self._known_users.values()
                     if u.code == user_code), None)
        if user is None:
            raise MercutoClientException("User not found")
        return UserDetails(
            code=user.code,
            username=user.username,
            mobile_number=user.mobile_number,
            email_address=None,
            first_name=None,
            last_name=None,
            api_keys=[]
        )

    def _setup_default_hooks(self) -> None:
        # NOTE: /users/<code> is registered before /users/<code>/details, but
        # fullmatch with [^\/]+ cannot swallow the '/details' suffix, so the
        # more specific route still wins for detail URLs.
        self.on('GET', r'\/identity\/verify\/me', self._mocked_verify_me)
        self.on('GET', r'\/identity\/users\/[^\/]+', self._mocked_get_user)
        self.on('GET', r'\/identity\/tenant\/[^\/]+', self._mocked_get_tenant)
        self.on(
            'GET', r'\/identity\/users\/[^\/]+\/details', self._mocked_get_user_details)

    def _on_make_request(self, method: str, url: str, *args, **kwargs) -> Any:
        """Replacement for MercutoClient._make_request while mocking is active."""
        # First check any custom hooks
        for (hook_method, pattern), callback in self._hooks.items():
            if method == hook_method and re.fullmatch(pattern, url) is not None:
                return callback(method, url, *args, **kwargs)

        raise NotImplementedError(
            "Mocking is not supported for this endpoint: %s %s" % (method, url))
|
mercuto_client/py.typed
ADDED
|
File without changes
|