dmart 1.4.17__py3-none-any.whl → 1.4.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dmart-1.4.17.dist-info → dmart-1.4.19.dist-info}/METADATA +1 -1
- {dmart-1.4.17.dist-info → dmart-1.4.19.dist-info}/RECORD +7 -17
- {dmart-1.4.17.dist-info → dmart-1.4.19.dist-info}/top_level.txt +0 -1
- dmart.py +25 -2
- pytests/__init__.py +0 -0
- pytests/api_user_models_erros_test.py +0 -16
- pytests/api_user_models_requests_test.py +0 -98
- pytests/archive_test.py +0 -72
- pytests/base_test.py +0 -300
- pytests/get_settings_test.py +0 -14
- pytests/json_to_db_migration_test.py +0 -237
- pytests/service_test.py +0 -26
- pytests/test_info.py +0 -55
- pytests/test_status.py +0 -15
- {dmart-1.4.17.dist-info → dmart-1.4.19.dist-info}/WHEEL +0 -0
- {dmart-1.4.17.dist-info → dmart-1.4.19.dist-info}/entry_points.txt +0 -0
- /alembic.ini → /dmart_migrations/alembic.ini +0 -0
{dmart-1.4.17.dist-info → dmart-1.4.19.dist-info}/RECORD
RENAMED

@@ -1,7 +1,6 @@
-alembic.ini,sha256=zs8d8VhH9TRwZrPK697EZPBCfaKvI4SqTuGuG0FAm2Y,3747
 bundler.py,sha256=MDTUTVS0WL7gb-i1rDcS4i37KTqaW88TluL4L3rwjRw,1817
 data_generator.py,sha256=CnE-VHEeX7-lAXtqCgbRqR9WHjTuOgeiZcviYrHAmho,2287
-dmart.py,sha256=
+dmart.py,sha256=xaWzoI7xbSizlBD5Cu27FxeERJ7O69I-HZGGBQ5ucfA,24352
 get_settings.py,sha256=Sbe2WCoiK398E7HY4SNLfDN_GmE8knR4M-YJWF31jcg,153
 info.json,sha256=hXQWl19lfMkEj_zXdehGeKjiKGNJ7emY4S7d4pIqJ1E,123
 main.py,sha256=KZGhIL6AnEm5ZAPy4IvhBDpzSTjuodilV7NafNOyhzM,19676
@@ -175,6 +174,7 @@ data_adapters/sql/json_to_db_migration.py,sha256=KaubDrRZ3MfPLc-CNGPpsEccPELKr1V
 data_adapters/sql/update_query_policies.py,sha256=LUpkyzDAkiwwVWnXxPdVAU6atehW72ECAazj3mEYHK0,3857
 dmart_migrations/README,sha256=MVlc9TYmr57RbhXET6QxgyCcwWP7w-vLkEsirENqiIQ,38
 dmart_migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+dmart_migrations/alembic.ini,sha256=zs8d8VhH9TRwZrPK697EZPBCfaKvI4SqTuGuG0FAm2Y,3747
 dmart_migrations/env.py,sha256=P1gnJRIUrP2jLHZ7HDbfkzkpuZJ_LMWqw05HIqOUJHo,3110
 dmart_migrations/notes.txt,sha256=X7LYSNmulLRiAUXvw07Z85bSGRTnx4NUpoNC9D2DfO8,395
 dmart_migrations/script.py.mako,sha256=u-ABdYW1tcIILgJFJdQZYLxoAI8Awd6wZbx30rY6fxU,680
@@ -246,16 +246,6 @@ plugins/system_notification_sender/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQe
 plugins/system_notification_sender/plugin.py,sha256=MUOujwyRJ3yQrXwZ-X4qSdx0ZNU2c-sYy0d0-U8twoA,8253
 plugins/update_access_controls/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 plugins/update_access_controls/plugin.py,sha256=43UV4vg-zxBF_7Bv0AZH6gU0Bgy2ybapNK21wJTF05k,301
-pytests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pytests/api_user_models_erros_test.py,sha256=6VWLIhazYjz7avXIMIDpT6doiBO5FVzGsGJ3Cv8cXyg,583
-pytests/api_user_models_requests_test.py,sha256=1AYZcMwa-AVeGrhTgwIkwqw3w7_CDPkbaJ0YDxLLKdY,3859
-pytests/archive_test.py,sha256=rk6jEZf-Ud7ReyH4_xJD-9SzNRz8p2Sg0qQX04VCw9M,2347
-pytests/base_test.py,sha256=d8prlME29tBnirW-3_HUtixcxUMPiLfJHRDiNkxOCRM,9902
-pytests/get_settings_test.py,sha256=AEqjnHsQjkVDqwVqtn2rN6mep4sAC_apDCgiZT4YQ28,281
-pytests/json_to_db_migration_test.py,sha256=JXO0knKPccXVIbKmyuD0yOi5fSBHmXm_NgVdO1_U7AE,9411
-pytests/service_test.py,sha256=92lqzKQoVMkj9XliPBjkGBxXb4zXsobb2WPfW5buQfc,807
-pytests/test_info.py,sha256=IOKtcEPM_03byhp5dSt2YbhTC5u_ORPahQLifZWBpjg,2074
-pytests/test_status.py,sha256=YFuBTsSd5hkpHp16GAbQ_I03RL_o2_yW-92ZNgKJry0,453
 utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 utils/access_control.py,sha256=8cCKr-6bL-Shl8j8xtfjEykMPGy6wkbNz-NRwLCdx-Y,11757
 utils/async_request.py,sha256=Lm2xGXLeph7P1_fLhhNJDhPubKT2ncFn_Ueft4JVoeI,255
@@ -282,8 +272,8 @@ utils/ticket_sys_utils.py,sha256=9QAlW2iiy8KyxQRBDj_WmzS5kKb0aYJmGwd4qzmGVqo,700
 utils/web_notifier.py,sha256=QM87VVid2grC5lK3NdS1yzz0z1wXljr4GChJOeK86W4,843
 utils/templates/activation.html.j2,sha256=XAMKCdoqONoc4ZQucD0yV-Pg5DlHHASZrTVItNS-iBE,640
 utils/templates/reminder.html.j2,sha256=aoS8bTs56q4hjAZKsb0jV9c-PIURBELuBOpT_qPZNVU,639
-dmart-1.4.
-dmart-1.4.
-dmart-1.4.
-dmart-1.4.
-dmart-1.4.
+dmart-1.4.19.dist-info/METADATA,sha256=fvC35gtYzj_QldMMoGBbBnWYuKZo_hrY87dBTygXgzE,2149
+dmart-1.4.19.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+dmart-1.4.19.dist-info/entry_points.txt,sha256=GjfoGh1bpxuU9HHGJzbtCFPNptHv9TryxHMN3uBSKpg,37
+dmart-1.4.19.dist-info/top_level.txt,sha256=S-gfX1pLerapNXiHZ8lvPYoV7sgwSX2_NCZ6xfzDUHM,267
+dmart-1.4.19.dist-info/RECORD,,
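Each RECORD row above has the form path,sha256=<digest>,<size>; in the wheel RECORD format (PEP 376 / PEP 427) the digest is the urlsafe-base64 encoding of the file's raw SHA-256 hash with the trailing '=' padding stripped. A minimal sketch for recomputing an entry, e.g. to check the new dmart.py row against an unpacked 1.4.19 wheel (the path is assumed to be relative to the unpacked wheel root):

import base64
import hashlib

def record_digest(path: str) -> str:
    # RECORD stores urlsafe base64 of the raw SHA-256 digest, '=' padding stripped.
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    return base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

# For dmart 1.4.19 this should print the value recorded above:
# xaWzoI7xbSizlBD5Cu27FxeERJ7O69I-HZGGBQ5ucfA
print(record_digest("dmart.py"))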
dmart.py
CHANGED
@@ -19,11 +19,32 @@ from hypercorn.config import Config
 from hypercorn.run import run
 
 try:
+    import alembic
+    # Check if we are importing the local alembic directory which is likely a leftover
+    if hasattr(alembic, '__path__'):
+        local_alembic_path = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'alembic'))
+        for p in alembic.__path__:
+            if os.path.abspath(p) == local_alembic_path:
+                # Check if it is a namespace package (no __init__.py)
+                if not os.path.exists(os.path.join(p, '__init__.py')):
+                    # It is a leftover directory
+                    try:
+                        shutil.rmtree(p)
+                        # Force reload
+                        import sys
+                        if 'alembic' in sys.modules:
+                            del sys.modules['alembic']
+                        import alembic
+                    except Exception:
+                        pass
+
     from alembic import command as alembic_command
     from alembic.config import Config as AlembicConfig
     HAS_ALEMBIC = True
-except ImportError:
+    ALEMBIC_ERROR = None
+except ImportError as e:
     HAS_ALEMBIC = False
+    ALEMBIC_ERROR = e
 
 from data_adapters.file.archive import archive
 from data_adapters.file.create_index import main as create_index
@@ -549,7 +570,7 @@ def main():
     # Fallback to alembic for backward compatibility
     alembic_dir = pkg_dir / "alembic"
 
-    original_ini_path = pkg_dir / "alembic.ini"
+    original_ini_path = pkg_dir / "dmart_migrations" / "alembic.ini"
     if not original_ini_path.exists():
         original_ini_path = alembic_dir / "alembic.ini"
@@ -561,6 +582,8 @@ def main():
     # Check if alembic library is installed
     if not HAS_ALEMBIC:
         print("Error: 'alembic' library not found. Please install it with 'pip install alembic'.")
+        if ALEMBIC_ERROR:
+            print(f"Details: {ALEMBIC_ERROR}")
         sys.exit(1)
 
     alembic_cfg = AlembicConfig(str(original_ini_path))
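For context on the new guard in the try block: a stale alembic/ directory sitting next to dmart.py (a leftover from releases before alembic.ini moved into dmart_migrations/, per the RECORD diff above) contains no __init__.py, so Python imports it as an implicit namespace package (PEP 420). That namespace package shadows the installed alembic distribution and makes `from alembic import command` fail. A standalone sketch of the same detection idea — not dmart's actual API, just an illustration of the pattern:

import os
import sys

def is_local_namespace_leftover(mod, here: str) -> bool:
    # PEP 420 namespace packages expose __path__ but ship no __init__.py;
    # if one of the path entries is a sibling directory of this script, it
    # is most likely a stale checkout shadowing the installed distribution.
    if not hasattr(mod, "__path__"):
        return False
    local = os.path.abspath(os.path.join(here, mod.__name__))
    return any(
        os.path.abspath(p) == local
        and not os.path.exists(os.path.join(p, "__init__.py"))
        for p in mod.__path__
    )

import alembic  # may resolve to the leftover directory at first

if is_local_namespace_leftover(alembic, os.path.dirname(os.path.abspath(__file__))):
    # dmart.py deletes the leftover directory (shutil.rmtree) before retrying;
    # only after that does evicting the cached module make the re-import
    # pick up the real installed package.
    sys.modules.pop("alembic", None)
    import alembic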
pytests/__init__.py
DELETED
File without changes

pytests/api_user_models_erros_test.py
DELETED

@@ -1,16 +0,0 @@
-import pytest
-from models.api import Error
-from utils.internal_error_code import InternalErrorCode
-from api.user.model.errors import INVALID_OTP, EXPIRED_OTP
-
-def test_invalid_otp():
-    assert isinstance(INVALID_OTP, Error)
-    assert INVALID_OTP.type == "OTP"
-    assert INVALID_OTP.code == InternalErrorCode.OTP_INVALID
-    assert INVALID_OTP.message == "Invalid OTP"
-
-def test_expired_otp():
-    assert isinstance(EXPIRED_OTP, Error)
-    assert EXPIRED_OTP.type == "OTP"
-    assert EXPIRED_OTP.code == InternalErrorCode.OTP_EXPIRED
-    assert EXPIRED_OTP.message == "Expired OTP"

pytests/api_user_models_requests_test.py
DELETED

@@ -1,98 +0,0 @@
-import pytest
-from pydantic import ValidationError
-
-from api.user.model.requests import (
-    OTPType,
-    SendOTPRequest,
-    PasswordResetRequest,
-    ConfirmOTPRequest,
-    UserLoginRequest,
-    Exception,
-    Error,
-    InternalErrorCode
-)
-import utils.regex as rgx
-
-
-def test_send_otp_request_valid_msisdn():
-    request = SendOTPRequest(msisdn="7777778110")
-    result = request.check_fields()
-    assert result == {"msisdn": "7777778110"}
-
-def test_send_otp_request_valid_email():
-    request = SendOTPRequest(email="test@example.com")
-    result = request.check_fields()
-    assert result == {"email": "test@example.com"}
-
-def test_send_otp_request_missing_fields():
-    # Ensure both fields are explicitly None
-    with pytest.raises(Exception) as excinfo:
-        SendOTPRequest(msisdn=None, email=None).check_fields()
-    assert excinfo.value.status_code == 422
-    assert excinfo.value.error.code == InternalErrorCode.EMAIL_OR_MSISDN_REQUIRED
-
-def test_send_otp_request_too_many_fields():
-    with pytest.raises(Exception) as excinfo:
-        SendOTPRequest(msisdn="7777778110", email="test@example.com").check_fields()
-    assert excinfo.value.status_code == 422
-    assert excinfo.value.error.code == InternalErrorCode.INVALID_STANDALONE_DATA
-
-def test_password_reset_request_valid_msisdn():
-    request = PasswordResetRequest(msisdn="7777778110")
-    result = request.check_fields()
-    assert result == {"msisdn": "7777778110"}
-
-def test_password_reset_request_valid_email():
-    request = PasswordResetRequest(email="test@example.com")
-    result = request.check_fields()
-    assert result == {"email": "test@example.com"}
-
-def test_password_reset_request_missing_fields():
-    with pytest.raises(Exception) as excinfo:
-        PasswordResetRequest().check_fields()
-    assert excinfo.value.status_code == 422
-    assert excinfo.value.error.code == InternalErrorCode.EMAIL_OR_MSISDN_REQUIRED
-
-def test_password_reset_request_too_many_fields():
-    with pytest.raises(Exception) as excinfo:
-        PasswordResetRequest(msisdn="7777778110", email="test@example.com").check_fields()
-    assert excinfo.value.status_code == 422
-    assert excinfo.value.error.code == InternalErrorCode.INVALID_STANDALONE_DATA
-
-def test_confirm_otp_request_valid():
-    request = ConfirmOTPRequest(msisdn="7777778110", code="123456")
-    assert request.msisdn == "7777778110"
-    assert request.code == "123456"
-
-def test_confirm_otp_request_invalid_code():
-    with pytest.raises(ValidationError):
-        ConfirmOTPRequest(msisdn="7777778110", code="invalid")
-
-def test_user_login_request_valid_shortname():
-    request = UserLoginRequest(shortname="john_doo", password="my_secure_password_@_93301")
-    result = request.check_fields()
-    assert result == {"shortname": "john_doo"}
-
-def test_user_login_request_valid_email():
-    request = UserLoginRequest(email="test@example.com", password="my_secure_password_@_93301")
-    result = request.check_fields()
-    assert result == {"email": "test@example.com"}
-
-def test_user_login_request_valid_msisdn():
-    request = UserLoginRequest(msisdn="7777778110", password="my_secure_password_@_93301")
-    result = request.check_fields()
-    assert result == {"msisdn": "7777778110"}
-
-def test_user_login_request_missing_fields():
-    request = UserLoginRequest(password="my_secure_password_@_93301")
-    result = request.check_fields()
-    assert result == {}
-
-def test_user_login_request_too_many_fields():
-    with pytest.raises(ValueError, match="Too many input has been passed"):
-        UserLoginRequest(shortname="john_doo", email="test@example.com", msisdn="7777778110", password="my_secure_password_@_93301").check_fields()
-
-def test_user_login_request_missing_password():
-    request = UserLoginRequest(shortname="john_doo")
-    result = request.check_fields()
-    assert result == {"shortname": "john_doo"}
pytests/archive_test.py
DELETED
@@ -1,72 +0,0 @@
-# import pytest
-# from unittest.mock import patch
-# from time import time
-# import argparse
-# import asyncio
-# from archive import redis_doc_to_meta, archive
-#
-#
-# def test_redis_doc_to_meta():
-#     mock_record = {
-#         "resource_type": "record",
-#         "created_at": time(),
-#         "updated_at": time(),
-#     }
-#     expected_keys = ["resource_type", "created_at", "updated_at"]
-#     with patch("models.core.Record") as MockRecord:
-#         MockRecord.model_fields.keys.return_value = expected_keys
-#         MockRecord.model_validate.return_value = mock_record
-#         meta = redis_doc_to_meta(mock_record)
-#         assert meta == mock_record
-#         assert MockRecord.model_fields.keys.call_count == 3
-#         MockRecord.model_validate.assert_called_once()
-#
-# def main():
-#     parser = argparse.ArgumentParser(
-#         description="Script for archiving records from different spaces and subpaths."
-#     )
-#     parser.add_argument("space", type=str, help="The name of the space")
-#     parser.add_argument("subpath", type=str, help="The subpath within the space")
-#     parser.add_argument(
-#         "schema",
-#         type=str,
-#         help="The subpath within the space. Optional, if not provided move everything",
-#         nargs="?",
-#     )
-#     parser.add_argument(
-#         "olderthan",
-#         type=int,
-#         help="The number of day, older than which, the entries will be archived (based on updated_at)",
-#     )
-#
-#     args = parser.parse_args()
-#     space = args.space
-#     subpath = args.subpath
-#     olderthan = args.olderthan
-#     schema = args.schema or "meta"
-#
-#     asyncio.run(archive(space, subpath, schema, olderthan))
-#     print("Done.")
-#
-#
-# @pytest.mark.asyncio
-# @patch("argparse.ArgumentParser.parse_args")
-# @patch("archive.archive")
-# async def test_main(mock_archive, mock_parse_args):
-#     mock_args = argparse.Namespace(
-#         space="space",
-#         subpath="subpath",
-#         schema="schema",
-#         olderthan=1
-#     )
-#     mock_parse_args.return_value = mock_args
-#
-#     with patch("asyncio.run") as mock_asyncio_run:
-#         mock_asyncio_run.side_effect = lambda x: asyncio.ensure_future(x)
-#         main()
-#
-#     mock_parse_args.assert_called_once()
-#     mock_asyncio_run.assert_called_once()
-#
-# if __name__ == "__main__":
-#     pytest.main()
pytests/base_test.py
DELETED
@@ -1,300 +0,0 @@
-import json
-from utils.settings import settings
-from fastapi import status
-from models.api import Query
-from models.enums import QueryType, ResourceType, RequestType
-
-superman = {}
-alibaba = {}
-
-with open("./login_creds.sh", "r") as file:
-    for line in file.readlines():
-        if line.strip().startswith("export SUPERMAN"):
-            superman = json.loads(str(line.strip().split("'")[1]))
-        if line.strip().startswith("export ALIBABA"):
-            alibaba = json.loads(str(line.strip().split("'")[1]))
-
-MANAGEMENT_SPACE: str = f"{settings.management_space}"
-USERS_SUBPATH: str = "users"
-DEMO_SPACE: str = "test"
-DEMO_SUBPATH: str = "content"
-
-
-async def get_superman_cookie(client) -> str:
-    response = await client.post(
-        "/user/login",
-        json={"shortname": superman["shortname"], "password": superman["password"]},
-    )
-    print(f"\n {response.json() = } \n creds: {superman = } \n")
-    assert response.status_code == status.HTTP_200_OK
-    # client.cookies.set("auth_token", response.cookies["auth_token"])
-    return str(response.cookies["auth_token"])
-
-async def set_superman_cookie(client):
-    response = await client.post(
-        "/user/login",
-        json={"shortname": superman["shortname"], "password": superman["password"]},
-    )
-    print(f"\n {response.json() = } \n creds: {superman = } \n")
-    assert response.status_code == status.HTTP_200_OK
-    client.cookies.set("auth_token", response.cookies["auth_token"])
-
-
-async def set_alibaba_cookie(client):
-    response = await client.post(
-        "/user/login",
-        json={"shortname": alibaba["shortname"], "password": alibaba["password"]},
-    )
-    print(f"\n {response.json() = } \n creds: {alibaba = } \n")
-    assert response.status_code == status.HTTP_200_OK
-    client.cookies.set("auth_token", response.cookies["auth_token"])
-
-
-async def init_test_db(client) -> None:
-    # Create the space
-    await client.post(
-        "managed/request",
-        json={
-            "space_name": DEMO_SPACE,
-            "request_type": RequestType.create,
-            "records": [
-                {
-                    "resource_type": ResourceType.space,
-                    "subpath": "/",
-                    "shortname": DEMO_SPACE,
-                    "attributes": {},
-                }
-            ],
-        },
-    )
-
-    # Create the folder
-    await client.post(
-        "/managed/request",
-        json={
-            "space_name": DEMO_SPACE,
-            "request_type": RequestType.create,
-            "records": [
-                {
-                    "resource_type": ResourceType.folder,
-                    "subpath": "/",
-                    "shortname": DEMO_SUBPATH,
-                    "attributes": {},
-                }
-            ],
-        },
-    )
-
-
-async def delete_space(client) -> None:
-    headers = {"Content-Type": "application/json"}
-    endpoint = "/managed/request"
-    request_data = {
-        "space_name": DEMO_SPACE,
-        "request_type": RequestType.delete,
-        "records": [
-            {
-                "resource_type": ResourceType.space,
-                "subpath": "/",
-                "shortname": DEMO_SPACE,
-                "attributes": {},
-            }
-        ],
-    }
-
-    assert_code_and_status_success(
-        await client.post(endpoint, json=request_data, headers=headers)
-    )
-    check_not_found(
-        await client.get(f"/managed/entry/space/{DEMO_SPACE}/__root__/{DEMO_SPACE}")
-    )
-
-
-def check_repeated_shortname(response):
-    json_response = response.json()
-    assert response.status_code == status.HTTP_400_BAD_REQUEST
-    assert "failed" == json_response.get("status")
-    assert "request" == json_response.get("error", {}).get("type")
-
-
-def check_not_found(response):
-    json_response = response.json()
-    assert response.status_code == status.HTTP_404_NOT_FOUND
-    assert "failed" == json_response.get("status")
-    assert "db" == json_response.get("error").get("type")
-
-
-def check_unauthorized(response):
-    json_response = response.json()
-    assert response.status_code == status.HTTP_401_UNAUTHORIZED
-    assert "failed" == json_response.get("status")
-    assert "auth" == json_response.get("error", {}).get("type")
-
-
-def assert_code_and_status_success(response):
-    if response.status_code != status.HTTP_200_OK:
-        print(
-            "\n\n\n\n\n========================= ERROR RESPONSE: =========================n:",
-            response.json(),
-            "\n\n\n\n\n",
-        )
-    json_response = response.json()
-    print(f"{json_response=}")
-    assert response.status_code == status.HTTP_200_OK
-    assert json_response.get("status") == "success"
-
-
-def assert_bad_request(response):
-    assert response.status_code == status.HTTP_400_BAD_REQUEST
-    assert response.json()["status"] == "failed"
-
-
-async def assert_resource_created(
-    client,
-    query: Query,
-    res_shortname: str,
-    res_subpath: str,
-    res_attributes: dict | None = None,
-    res_attachments: dict[str, int] | None = None,
-):
-    if not query.search:
-        query.search = ""
-    response = await client.post(
-        "/managed/query",
-        json=query.model_dump(exclude_none=True),
-    )
-    assert_code_and_status_success(response)
-    json_response = response.json()
-    assert json_response["status"] == "success"
-    assert json_response["attributes"]["returned"] == query.limit
-    assert json_response["records"][0]["shortname"] == res_shortname
-    assert json_response["records"][0]["subpath"] in [res_subpath, f"/{res_subpath}"]
-    if res_attributes:
-        if "is_active" not in res_attributes:
-            res_attributes["is_active"] = False
-        if "tags" not in res_attributes:
-            res_attributes["tags"] = []
-        res_attributes["owner_shortname"] = "dmart"
-
-        json_response["records"][0]["attributes"].pop("created_at", None)
-        json_response["records"][0]["attributes"].pop("updated_at", None)
-        assert (
-            json_response["records"][0]["attributes"]["payload"]["body"]
-            == res_attributes["payload"]["body"]
-        )
-
-    # Assert correct attachments number for each attachment type returned
-    if res_attachments:
-        for attachment_key, attachments in json_response["records"][0][
-            "attachments"
-        ].items():
-            if attachment_key in res_attachments:
-                assert len(attachments) == res_attachments[attachment_key]
-
-
-async def assert_resource_deleted(client, space: str, subpath: str, shortname: str):
-    query = Query(
-        type=QueryType.search,
-        space_name=space,
-        subpath=subpath,
-        search="",
-        filter_shortnames=[shortname],
-        retrieve_json_payload=True,
-        limit=1,
-    )
-    response = await client.post("/managed/query", json=query.model_dump(exclude_none=True))
-    assert_code_and_status_success(response)
-    assert response.json()["status"] == "success"
-    assert response.json()["attributes"]["returned"] == 0
-
-
-async def upload_resource_with_payload(
-    client,
-    space_name,
-    record_path: str,
-    payload_path: str,
-    payload_type,
-    attachment=False,
-    is_fail=False,
-):
-    with open(record_path, "rb") as request_file, open(
-        payload_path, "rb"
-    ) as media_file:
-        files = {
-            "request_record": ("record.json", request_file, "application/json"),
-            "payload_file": (media_file.name.split("/")[-1], media_file, payload_type),
-        }
-        response = await client.post(
-            "managed/resource_with_payload",
-            headers={},
-            data={"space_name": space_name},
-            files=files,
-        )
-
-    if is_fail:
-        assert response.status_code == status.HTTP_400_BAD_REQUEST
-    else:
-        assert_code_and_status_success(response)
-
-    if attachment:
-        with open(record_path, 'r') as record_file:
-            record_data = json.loads(record_file.read())
-        subpath_parts = record_data["subpath"].split('/')
-        attach_parent_subpath, attach_parent_shortname = "/".join(subpath_parts[:-1]), subpath_parts[-1]
-        await assert_resource_created(
-            client,
-            query=Query(
-                type=QueryType.search,
-                space_name=space_name,
-                subpath=attach_parent_subpath,
-                filter_shortnames=[attach_parent_shortname],
-                retrieve_json_payload=True,
-                retrieve_attachments=True,
-                limit=1,
-            ),
-            res_shortname=attach_parent_shortname,
-            res_subpath=attach_parent_subpath,
-            res_attachments={"media": 1},
-        )
-
-
-async def delete_resource(client, resource_type: str, del_subpath: str, del_shortname: str):
-    headers = {"Content-Type": "application/json"}
-    endpoint = "/managed/request"
-    request_data = {
-        "space_name": DEMO_SPACE,
-        "request_type": RequestType.delete,
-        "records": [
-            {
-                "resource_type": resource_type,
-                "subpath": del_subpath,
-                "shortname": del_shortname,
-                "attributes": {},
-            }
-        ],
-    }
-
-    response = await client.post(endpoint, json=request_data, headers=headers)
-    assert_code_and_status_success(response)
-
-
-async def retrieve_content_folder(client):
-    response = await client.get(f"managed/entry/folder/{DEMO_SPACE}/{settings.root_subpath_mw}/{DEMO_SUBPATH}")
-
-    assert response.status_code == status.HTTP_200_OK
-
-    await assert_resource_created(
-        client,
-        query=Query(
-            type=QueryType.search,
-            space_name=DEMO_SPACE,
-            subpath="/",
-            filter_shortnames=[DEMO_SUBPATH],
-            filter_types=[ResourceType.folder],
-            retrieve_json_payload=True,
-            limit=1,
-        ),
-        res_shortname=DEMO_SUBPATH,
-        res_subpath="/",
-        res_attributes={},
-    )
pytests/get_settings_test.py
DELETED
@@ -1,14 +0,0 @@
-# test_script.py
-import subprocess
-import pytest
-
-
-@pytest.mark.run(order=4)
-def test_script_execution():
-    result = subprocess.run(
-        ['python3', 'get_settings.py'],
-        capture_output=True,
-        text=True,
-        check=True
-    )
-    assert result.returncode == 0
pytests/json_to_db_migration_test.py
DELETED

@@ -1,237 +0,0 @@
-import time
-from uuid import uuid4
-import pytest
-import os
-import json
-from pathlib import Path
-from sqlmodel import Session, create_engine, text, SQLModel
-from data_adapters.sql.create_tables import Attachments, Entries, Spaces, Histories
-from sqlalchemy.exc import OperationalError
-from utils.settings import settings
-
-
-def subpath_checker(subpath: str):
-    if subpath.endswith("/"):
-        subpath = subpath[:-1]
-    if not subpath.startswith("/"):
-        subpath = '/' + subpath
-    return subpath
-
-
-def connect_with_retry(engine, retries=5, delay=2):
-    """
-    Try to connect to the database with retries.
-    """
-    for attempt in range(retries):
-        try:
-            with engine.connect() as _:
-                print(f"Connected to the database on attempt {attempt + 1}")
-                return
-        except OperationalError as e:
-            print(f"Connection attempt {attempt + 1} failed: {e}")
-            time.sleep(delay)
-    raise Exception("Could not connect to the database after multiple attempts")
-
-
-@pytest.fixture(scope="module")
-def setup_database():
-    if settings.active_data_db == "file":
-        pytest.skip("Skipping test for file-based database")
-        return
-
-    # Use the settings to connect with the main `postgres` user
-    postgresql_url = f"{settings.database_driver.replace('+asyncpg','+psycopg')}://{settings.database_username}:{settings.database_password}@{settings.database_host}:{settings.database_port}"
-    engine = create_engine(f"{postgresql_url}/postgres", echo=False, isolation_level="AUTOCOMMIT")
-
-    # Create the database
-    with Session(engine) as session:
-        try:
-            session.exec(text(f"DROP DATABASE IF EXISTS {settings.database_name}"))
-            session.commit()
-            session.exec(text(f"CREATE DATABASE {settings.database_name}"))
-            session.commit()  # Ensure the transaction is fully committed
-            print(f"Database {settings.database_name} created successfully")
-        except Exception as e:
-            print(f"Database creation failed: {e}")
-
-    # Add a small delay to ensure the database is fully ready
-    time.sleep(2)
-
-    yield
-
-    # Drop the database after tests
-    with Session(engine) as session:
-        try:
-            session.exec(text(f"DROP DATABASE IF EXISTS {settings.database_name}"))
-            session.commit()
-            print(f"Database {settings.database_name} dropped successfully")
-        except Exception as e:
-            print(f"Database deletion failed: {e}")
-
-    engine.dispose()
-
-
-@pytest.fixture(scope="module")
-def setup_environment(setup_database):
-    if settings.active_data_db == "file":
-        pytest.skip("Skipping test for file-based database")
-        return
-
-    # Set the database name from settings
-    driver = settings.database_driver.replace('+asyncpg', '+psycopg')
-    postgresql_url = f"{driver}://{settings.database_username}:{settings.database_password}@{settings.database_host}:{settings.database_port}"
-    engine = create_engine(f"{postgresql_url}/{settings.database_name}", echo=False)
-
-    # Retry connecting to the newly created database
-    connect_with_retry(engine)
-
-    # Generate tables after ensuring connection
-    postgresql_url = f"{driver}://{settings.database_username}:{settings.database_password}@{settings.database_host}:{settings.database_port}/{settings.database_name}"
-    engine = create_engine(postgresql_url, echo=False)
-    SQLModel.metadata.create_all(engine)
-
-    yield engine
-
-    engine.dispose()
-
-
-def test_json_to_db_migration(setup_environment):
-    if settings.active_data_db == "file":
-        pytest.skip("Skipping test for file-based database")
-        return
-
-    engine = setup_environment
-
-    # Create a complex mock directory structure and files for different entry types
-    os.makedirs('/tmp/test_space/.dm', exist_ok=True)
-    with open('/tmp/test_space/.dm/meta.space.json', 'w') as f:
-        json.dump({"key": "value"}, f)
-
-    # Create more directories and files for the migration
-    os.makedirs('/tmp/test_space/dir1', exist_ok=True)
-    with open('/tmp/test_space/dir1/history.jsonl', 'w') as f:
-        f.write(json.dumps({"key": "history"}) + '\n')
-
-    # Create attachments folder and files
-    os.makedirs('/tmp/test_space/dir1/attachments', exist_ok=True)
-    with open('/tmp/test_space/dir1/attachments/meta.attachments.json', 'w') as f:
-        json.dump({
-            "uuid": str(uuid4()),
-            "space_name": "test_space",
-            "subpath": "/dir1",
-            "acl": [],
-            "relationships": [],
-            "payload": {"body": "attachment content"}
-        }, f)
-
-    # Create ticket-related file
-    with open('/tmp/test_space/dir1/meta.ticket.json', 'w') as f:
-        json.dump({
-            "state": "open",
-            "is_open": True,
-            "reporter": "user1",
-            "subpath": "/dir1/ticket"
-        }, f)
-
-    # Create user meta file
-    with open('/tmp/test_space/.dm/meta.user.json', 'w') as f:
-        json.dump({
-            "resource_type": "user",
-            "firebase_token": "firebase_token",
-            "language": "en"
-        }, f)
-
-    # Create role meta file
-    with open('/tmp/test_space/.dm/meta.role.json', 'w') as f:
-        json.dump({
-            "resource_type": "role",
-            "permissions": ["read", "write"]
-        }, f)
-
-    # Create permission meta file
-    with open('/tmp/test_space/.dm/meta.permission.json', 'w') as f:
-        json.dump({
-            "resource_type": "permission",
-            "subpaths": {"read": "/read", "write": "/write"},
-            "resource_types": ["user", "role"]
-        }, f)
-
-    # Run the migration script
-    try:
-        with Session(engine) as session:
-            for root, dirs, _ in os.walk('/tmp/test_space'):
-                tmp = root.replace('/tmp/test_space', '')
-                if tmp == '':
-                    continue
-                if tmp[0] == '/':
-                    tmp = tmp[1:]
-                space_name = tmp.split('/')[0]
-                subpath = '/'.join(tmp.split('/')[1:])
-                if space_name == '..':
-                    continue
-
-                if space_name.startswith('.git'):
-                    continue
-
-                if subpath == '' or subpath == '/':
-                    subpath = '/'
-                    p = os.path.join(root, '.dm', 'meta.space.json')
-                    entry = {}
-                    if Path(p).is_file():
-                        entry = json.load(open(p))
-                        entry['space_name'] = space_name
-                        entry['subpath'] = '/'
-                        session.add(Spaces.model_validate(entry))
-                    continue
-
-                subpath = subpath.replace('.dm', '')
-                if subpath != '/' and subpath.endswith('/'):
-                    subpath = subpath[:-1]
-
-                if subpath == '':
-                    subpath = '/'
-
-                for dir in dirs:
-                    for file in os.listdir(os.path.join(root, dir)):
-                        if not file.startswith('meta'):
-                            if file == 'history.jsonl':
-                                lines = open(os.path.join(root, dir, file), 'r').readlines()
-                                for line in lines:
-                                    history = json.loads(line)
-                                    history['shortname'] = dir
-                                    history['space_name'] = space_name
-                                    history['subpath'] = subpath_checker(subpath)
-                                    session.add(Histories.model_validate(history))
-                            continue
-
-                        p = os.path.join(root, dir, file)
-                        if Path(p).is_file():
-                            if 'attachments' in p:
-                                _attachment = json.load(open(os.path.join(root, dir, file)))
-                                _attachment['space_name'] = space_name
-                                _attachment['uuid'] = _attachment.get('uuid', uuid4())
-                                _attachment['subpath'] = subpath_checker(_attachment['subpath'])
-                                session.add(Attachments.model_validate(_attachment))
-                            elif file.endswith('.json'):
-                                entry = json.load(open(p))
-                                entry['space_name'] = space_name
-                                entry['subpath'] = subpath_checker(subpath)
-                                session.add(Entries.model_validate(entry))
-            session.commit()
-        assert True  # Assert that the migration completes without error
-    except Exception as e:
-        print(f"Migration failed: {e}")
-        assert False  # Fail the test if there is any exception
-
-    # Clean up the mock directory structure
-    os.remove('/tmp/test_space/.dm/meta.space.json')
-    os.remove('/tmp/test_space/dir1/history.jsonl')
-    os.remove('/tmp/test_space/dir1/attachments/meta.attachments.json')
-    os.remove('/tmp/test_space/dir1/meta.ticket.json')
-    os.remove('/tmp/test_space/.dm/meta.user.json')
-    os.remove('/tmp/test_space/.dm/meta.role.json')
-    os.remove('/tmp/test_space/.dm/meta.permission.json')
-    os.rmdir('/tmp/test_space/dir1/attachments')
-    os.rmdir('/tmp/test_space/.dm')
-    os.rmdir('/tmp/test_space/dir1')
-    os.rmdir('/tmp/test_space')
pytests/service_test.py
DELETED
@@ -1,26 +0,0 @@
-from api.user.service import gen_alphanumeric
-import pytest
-
-@pytest.mark.run(order=9)
-def test_gen_alphanumeric_length():
-    # Test default length
-    result = gen_alphanumeric()
-    assert len(result) == 16, "Default length should be 16"
-
-    # Test custom length
-    length = 32
-    result = gen_alphanumeric(length)
-    assert len(result) == length, f"Length should be {length}"
-
-
-@pytest.mark.run(order=9)
-def test_gen_alphanumeric_characters():
-    result = gen_alphanumeric()
-    assert all(c.isalnum() for c in result), "Result should only contain alphanumeric characters"
-
-
-@pytest.mark.run(order=9)
-def test_gen_alphanumeric_unique():
-    num_samples = 100
-    samples = {gen_alphanumeric() for _ in range(num_samples)}
-    assert len(samples) == num_samples, "Generated strings should be unique"
pytests/test_info.py
DELETED
@@ -1,55 +0,0 @@
-from httpx import AsyncClient
-from pytests.base_test import get_superman_cookie
-from fastapi import status
-import pytest
-from utils.internal_error_code import InternalErrorCode
-from utils.jwt import sign_jwt
-from utils.settings import settings
-
-
-
-@pytest.mark.run(order=6)
-@pytest.mark.anyio
-async def test_info_me(client: AsyncClient) -> None:
-
-    client.cookies.set("auth_token", await get_superman_cookie(client))
-    response = await client.get("/info/me")
-    assert response.status_code == status.HTTP_200_OK
-    json_response = response.json()
-    assert json_response["status"] == "success"
-
-@pytest.mark.run(order=6)
-@pytest.mark.anyio
-async def test_info_manifest(client: AsyncClient) -> None:
-
-    client.cookies.set("auth_token", await get_superman_cookie(client))
-    response = await client.get("/info/manifest")
-    assert response.status_code == status.HTTP_200_OK
-    json_response = response.json()
-    assert json_response["status"] == "success"
-
-
-@pytest.mark.run(order=6)
-@pytest.mark.anyio
-async def test_get_settings_should_pass(client: AsyncClient) -> None:
-    client.cookies.set("auth_token", await get_superman_cookie(client))
-    response = await client.get("/info/settings")
-    assert response.status_code == status.HTTP_200_OK
-
-
-# @pytest.mark.run(order=6)
-# @pytest.mark.anyio
-# async def test_in_loop_tasks(client: AsyncClient) -> None:
-#     client.cookies.set("auth_token", await get_superman_cookie(client))
-#     response = await client.get("/info/in-loop-tasks")
-#     assert response.status_code == status.HTTP_200_OK
-#     json_response = response.json()
-#     assert json_response["status"] == "success"
-#     assert "tasks_count" in json_response["attributes"]
-#     assert isinstance(json_response["attributes"]["tasks_count"], int)
-#     assert "tasks" in json_response["attributes"]
-#     assert isinstance(json_response["attributes"]["tasks"], list)
-#     for task in json_response["attributes"]["tasks"]:
-#         assert "name" in task
-#         assert "coroutine" in task
-#         assert "stack" in task
pytests/test_status.py
DELETED
@@ -1,15 +0,0 @@
-
-# from pytests.base_test import client
-from fastapi import status
-from httpx import AsyncClient
-import pytest
-
-# @pytest.mark.asyncio(scope="session")
-@pytest.mark.anyio
-async def test_sanity(client: AsyncClient) -> None:
-    # async with my_client as client:
-    response = await client.get("/")
-    assert response.status_code == status.HTTP_200_OK
-    json_response = response.json()
-    assert json_response["status"] == "success"
-
{dmart-1.4.17.dist-info → dmart-1.4.19.dist-info}/WHEEL
RENAMED

File without changes

{dmart-1.4.17.dist-info → dmart-1.4.19.dist-info}/entry_points.txt
RENAMED

File without changes

/alembic.ini → /dmart_migrations/alembic.ini
RENAMED

File without changes