dmart 0.1.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alembic/__init__.py +0 -0
- alembic/env.py +91 -0
- alembic/scripts/__init__.py +0 -0
- alembic/scripts/calculate_checksums.py +77 -0
- alembic/scripts/migration_f7a4949eed19.py +28 -0
- alembic/versions/0f3d2b1a7c21_add_authz_materialized_views.py +87 -0
- alembic/versions/10d2041b94d4_last_checksum_history.py +62 -0
- alembic/versions/1cf4e1ee3cb8_ext_permission_with_filter_fields_values.py +33 -0
- alembic/versions/26bfe19b49d4_rm_failedloginattempts.py +42 -0
- alembic/versions/3c8bca2219cc_add_otp_table.py +38 -0
- alembic/versions/6675fd9dfe42_remove_unique_from_sessions_table.py +36 -0
- alembic/versions/71bc1df82e6a_adding_user_last_login_at.py +43 -0
- alembic/versions/74288ccbd3b5_initial.py +264 -0
- alembic/versions/7520a89a8467_rm_activesession_table.py +39 -0
- alembic/versions/848b623755a4_make_created_nd_updated_at_required.py +138 -0
- alembic/versions/8640dcbebf85_add_notes_to_users.py +32 -0
- alembic/versions/91c94250232a_adding_fk_on_owner_shortname.py +104 -0
- alembic/versions/98ecd6f56f9a_ext_meta_with_owner_group_shortname.py +66 -0
- alembic/versions/9aae9138c4ef_indexing_created_at_updated_at.py +80 -0
- alembic/versions/__init__.py +0 -0
- alembic/versions/b53f916b3f6d_json_to_jsonb.py +492 -0
- alembic/versions/eb5f1ec65156_adding_user_locked_to_device.py +36 -0
- alembic/versions/f7a4949eed19_adding_query_policies_to_meta.py +60 -0
- api/__init__.py +0 -0
- api/info/__init__.py +0 -0
- api/info/router.py +109 -0
- api/managed/__init__.py +0 -0
- api/managed/router.py +1541 -0
- api/managed/utils.py +1850 -0
- api/public/__init__.py +0 -0
- api/public/router.py +758 -0
- api/qr/__init__.py +0 -0
- api/qr/router.py +108 -0
- api/user/__init__.py +0 -0
- api/user/model/__init__.py +0 -0
- api/user/model/errors.py +14 -0
- api/user/model/requests.py +165 -0
- api/user/model/responses.py +11 -0
- api/user/router.py +1401 -0
- api/user/service.py +270 -0
- bundler.py +44 -0
- config/__init__.py +0 -0
- config/channels.json +11 -0
- config/notification.json +17 -0
- data_adapters/__init__.py +0 -0
- data_adapters/adapter.py +16 -0
- data_adapters/base_data_adapter.py +467 -0
- data_adapters/file/__init__.py +0 -0
- data_adapters/file/adapter.py +2043 -0
- data_adapters/file/adapter_helpers.py +1013 -0
- data_adapters/file/archive.py +150 -0
- data_adapters/file/create_index.py +331 -0
- data_adapters/file/create_users_folders.py +52 -0
- data_adapters/file/custom_validations.py +68 -0
- data_adapters/file/drop_index.py +40 -0
- data_adapters/file/health_check.py +560 -0
- data_adapters/file/redis_services.py +1110 -0
- data_adapters/helpers.py +27 -0
- data_adapters/sql/__init__.py +0 -0
- data_adapters/sql/adapter.py +3210 -0
- data_adapters/sql/adapter_helpers.py +491 -0
- data_adapters/sql/create_tables.py +451 -0
- data_adapters/sql/create_users_folders.py +53 -0
- data_adapters/sql/db_to_json_migration.py +482 -0
- data_adapters/sql/health_check_sql.py +232 -0
- data_adapters/sql/json_to_db_migration.py +454 -0
- data_adapters/sql/update_query_policies.py +101 -0
- data_generator.py +81 -0
- dmart-0.1.9.dist-info/METADATA +64 -0
- dmart-0.1.9.dist-info/RECORD +149 -0
- dmart-0.1.9.dist-info/WHEEL +5 -0
- dmart-0.1.9.dist-info/entry_points.txt +2 -0
- dmart-0.1.9.dist-info/top_level.txt +23 -0
- dmart.py +513 -0
- get_settings.py +7 -0
- languages/__init__.py +0 -0
- languages/arabic.json +15 -0
- languages/english.json +16 -0
- languages/kurdish.json +14 -0
- languages/loader.py +13 -0
- main.py +506 -0
- migrate.py +24 -0
- models/__init__.py +0 -0
- models/api.py +203 -0
- models/core.py +597 -0
- models/enums.py +255 -0
- password_gen.py +8 -0
- plugins/__init__.py +0 -0
- plugins/action_log/__init__.py +0 -0
- plugins/action_log/plugin.py +121 -0
- plugins/admin_notification_sender/__init__.py +0 -0
- plugins/admin_notification_sender/plugin.py +124 -0
- plugins/ldap_manager/__init__.py +0 -0
- plugins/ldap_manager/plugin.py +100 -0
- plugins/local_notification/__init__.py +0 -0
- plugins/local_notification/plugin.py +123 -0
- plugins/realtime_updates_notifier/__init__.py +0 -0
- plugins/realtime_updates_notifier/plugin.py +58 -0
- plugins/redis_db_update/__init__.py +0 -0
- plugins/redis_db_update/plugin.py +188 -0
- plugins/resource_folders_creation/__init__.py +0 -0
- plugins/resource_folders_creation/plugin.py +81 -0
- plugins/system_notification_sender/__init__.py +0 -0
- plugins/system_notification_sender/plugin.py +188 -0
- plugins/update_access_controls/__init__.py +0 -0
- plugins/update_access_controls/plugin.py +9 -0
- pytests/__init__.py +0 -0
- pytests/api_user_models_erros_test.py +16 -0
- pytests/api_user_models_requests_test.py +98 -0
- pytests/archive_test.py +72 -0
- pytests/base_test.py +300 -0
- pytests/get_settings_test.py +14 -0
- pytests/json_to_db_migration_test.py +237 -0
- pytests/service_test.py +26 -0
- pytests/test_info.py +55 -0
- pytests/test_status.py +15 -0
- run_notification_campaign.py +98 -0
- scheduled_notification_handler.py +121 -0
- schema_migration.py +208 -0
- schema_modulate.py +192 -0
- set_admin_passwd.py +55 -0
- sync.py +202 -0
- utils/__init__.py +0 -0
- utils/access_control.py +306 -0
- utils/async_request.py +8 -0
- utils/exporter.py +309 -0
- utils/firebase_notifier.py +57 -0
- utils/generate_email.py +38 -0
- utils/helpers.py +352 -0
- utils/hypercorn_config.py +12 -0
- utils/internal_error_code.py +60 -0
- utils/jwt.py +124 -0
- utils/logger.py +167 -0
- utils/middleware.py +99 -0
- utils/notification.py +75 -0
- utils/password_hashing.py +16 -0
- utils/plugin_manager.py +215 -0
- utils/query_policies_helper.py +112 -0
- utils/regex.py +44 -0
- utils/repository.py +529 -0
- utils/router_helper.py +19 -0
- utils/settings.py +165 -0
- utils/sms_notifier.py +21 -0
- utils/social_sso.py +67 -0
- utils/templates/activation.html.j2 +26 -0
- utils/templates/reminder.html.j2 +17 -0
- utils/ticket_sys_utils.py +203 -0
- utils/web_notifier.py +29 -0
- websocket.py +231 -0
|
@@ -0,0 +1,237 @@
|
|
|
1
|
+
import time
|
|
2
|
+
from uuid import uuid4
|
|
3
|
+
import pytest
|
|
4
|
+
import os
|
|
5
|
+
import json
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from sqlmodel import Session, create_engine, text, SQLModel
|
|
8
|
+
from data_adapters.sql.create_tables import Attachments, Entries, Spaces, Histories
|
|
9
|
+
from sqlalchemy.exc import OperationalError
|
|
10
|
+
from utils.settings import settings
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def subpath_checker(subpath: str):
    """Normalize *subpath*: strip one trailing slash, ensure a leading slash."""
    trimmed = subpath[:-1] if subpath.endswith("/") else subpath
    return trimmed if trimmed.startswith("/") else "/" + trimmed
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def connect_with_retry(engine, retries=5, delay=2):
    """
    Try to connect to the database with retries.

    Opens (and immediately closes) a connection from *engine* up to
    *retries* times, sleeping *delay* seconds between failed attempts.
    Returns None on the first successful connection.

    Raises:
        Exception: when every attempt fails; the last OperationalError
            is chained as the cause for easier debugging.
    """
    last_error = None
    for attempt in range(1, retries + 1):
        try:
            with engine.connect() as _:
                print(f"Connected to the database on attempt {attempt}")
                return
        except OperationalError as e:
            last_error = e
            print(f"Connection attempt {attempt} failed: {e}")
            # BUG FIX: don't sleep after the final attempt — we are about
            # to give up, so the extra delay was pure wasted time.
            if attempt < retries:
                time.sleep(delay)
    raise Exception("Could not connect to the database after multiple attempts") from last_error
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@pytest.fixture(scope="module")
def setup_database():
    """Module-scoped fixture that (re)creates the test database.

    Connects to the server's maintenance ``postgres`` database with
    AUTOCOMMIT isolation (so DDL such as CREATE DATABASE runs outside a
    transaction), drops any stale test database, creates a fresh one,
    yields for the duration of the test module, then drops it again.
    Skipped entirely when the project is configured for the file-based
    data adapter.
    """
    if settings.active_data_db == "file":
        # pytest.skip raises, so the return below is only defensive.
        pytest.skip("Skipping test for file-based database")
        return

    # Use the settings to connect with the main `postgres` user.
    # The async driver is swapped for psycopg since this fixture is sync.
    postgresql_url = f"{settings.database_driver.replace('+asyncpg','+psycopg')}://{settings.database_username}:{settings.database_password}@{settings.database_host}:{settings.database_port}"
    engine = create_engine(f"{postgresql_url}/postgres", echo=False, isolation_level="AUTOCOMMIT")

    # Create the database.  Best-effort: a failure is printed, not raised,
    # so the real connection error surfaces later in setup_environment.
    with Session(engine) as session:
        try:
            session.exec(text(f"DROP DATABASE IF EXISTS {settings.database_name}"))
            session.commit()
            session.exec(text(f"CREATE DATABASE {settings.database_name}"))
            session.commit()  # Ensure the transaction is fully committed
            print(f"Database {settings.database_name} created successfully")
        except Exception as e:
            print(f"Database creation failed: {e}")

    # Add a small delay to ensure the database is fully ready
    time.sleep(2)

    yield

    # Drop the database after tests (again best-effort, failure is printed).
    with Session(engine) as session:
        try:
            session.exec(text(f"DROP DATABASE IF EXISTS {settings.database_name}"))
            session.commit()
            print(f"Database {settings.database_name} dropped successfully")
        except Exception as e:
            print(f"Database deletion failed: {e}")

    engine.dispose()
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
@pytest.fixture(scope="module")
def setup_environment(setup_database):
    """Module-scoped fixture yielding a sync engine bound to the test database.

    Depends on ``setup_database`` (which creates the database), waits until
    the database accepts connections, creates all SQLModel tables, and
    disposes the engine afterwards.  Skipped for the file-based adapter.
    """
    if settings.active_data_db == "file":
        pytest.skip("Skipping test for file-based database")
        return

    # Build a synchronous URL (swap the async driver for psycopg) pointing
    # at the test database created by setup_database.
    driver = settings.database_driver.replace('+asyncpg', '+psycopg')
    postgresql_url = (
        f"{driver}://{settings.database_username}:{settings.database_password}"
        f"@{settings.database_host}:{settings.database_port}/{settings.database_name}"
    )
    engine = create_engine(postgresql_url, echo=False)

    # Retry connecting to the newly created database.
    connect_with_retry(engine)

    # Generate tables after ensuring the connection works.
    # BUG FIX: the original code created a second, identical engine here and
    # leaked the first engine's connection pool; one engine is sufficient.
    SQLModel.metadata.create_all(engine)

    yield engine

    engine.dispose()
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def test_json_to_db_migration(setup_environment):
    """Smoke-test the JSON-file-to-database migration walk.

    Builds a mock space layout under /tmp/test_space (space/user/role/
    permission meta files, a history log, an attachment and a ticket),
    then replays the migration script's os.walk logic against the test
    database created by the fixtures.  Passing means the whole
    walk + model-validate + insert + commit completed without raising.
    """
    if settings.active_data_db == "file":
        pytest.skip("Skipping test for file-based database")
        return

    engine = setup_environment

    # Create a complex mock directory structure and files for different entry types
    os.makedirs('/tmp/test_space/.dm', exist_ok=True)
    with open('/tmp/test_space/.dm/meta.space.json', 'w') as f:
        json.dump({"key": "value"}, f)

    # Create more directories and files for the migration
    os.makedirs('/tmp/test_space/dir1', exist_ok=True)
    with open('/tmp/test_space/dir1/history.jsonl', 'w') as f:
        f.write(json.dumps({"key": "history"}) + '\n')

    # Create attachments folder and files
    os.makedirs('/tmp/test_space/dir1/attachments', exist_ok=True)
    with open('/tmp/test_space/dir1/attachments/meta.attachments.json', 'w') as f:
        json.dump({
            "uuid": str(uuid4()),
            "space_name": "test_space",
            "subpath": "/dir1",
            "acl": [],
            "relationships": [],
            "payload": {"body": "attachment content"}
        }, f)

    # Create ticket-related file
    with open('/tmp/test_space/dir1/meta.ticket.json', 'w') as f:
        json.dump({
            "state": "open",
            "is_open": True,
            "reporter": "user1",
            "subpath": "/dir1/ticket"
        }, f)

    # Create user meta file
    with open('/tmp/test_space/.dm/meta.user.json', 'w') as f:
        json.dump({
            "resource_type": "user",
            "firebase_token": "firebase_token",
            "language": "en"
        }, f)

    # Create role meta file
    with open('/tmp/test_space/.dm/meta.role.json', 'w') as f:
        json.dump({
            "resource_type": "role",
            "permissions": ["read", "write"]
        }, f)

    # Create permission meta file
    with open('/tmp/test_space/.dm/meta.permission.json', 'w') as f:
        json.dump({
            "resource_type": "permission",
            "subpaths": {"read": "/read", "write": "/write"},
            "resource_types": ["user", "role"]
        }, f)

    # Run the migration script logic
    try:
        with Session(engine) as session:
            for root, dirs, _ in os.walk('/tmp/test_space'):
                tmp = root.replace('/tmp/test_space', '')
                if tmp == '':
                    continue
                if tmp[0] == '/':
                    tmp = tmp[1:]
                space_name = tmp.split('/')[0]
                subpath = '/'.join(tmp.split('/')[1:])
                if space_name == '..':
                    continue

                if space_name.startswith('.git'):
                    continue

                # Space root: ingest its meta.space.json as a Spaces row
                if subpath == '' or subpath == '/':
                    subpath = '/'
                    p = os.path.join(root, '.dm', 'meta.space.json')
                    entry = {}
                    if Path(p).is_file():
                        # BUG FIX: close the file handle via a context manager
                        with open(p) as meta_file:
                            entry = json.load(meta_file)
                    entry['space_name'] = space_name
                    entry['subpath'] = '/'
                    session.add(Spaces.model_validate(entry))
                    continue

                subpath = subpath.replace('.dm', '')
                if subpath != '/' and subpath.endswith('/'):
                    subpath = subpath[:-1]

                if subpath == '':
                    subpath = '/'

                for dir in dirs:
                    for file in os.listdir(os.path.join(root, dir)):
                        if not file.startswith('meta'):
                            if file == 'history.jsonl':
                                # BUG FIX: bare open(...).readlines() leaked the handle
                                with open(os.path.join(root, dir, file), 'r') as history_file:
                                    lines = history_file.readlines()
                                for line in lines:
                                    history = json.loads(line)
                                    history['shortname'] = dir
                                    history['space_name'] = space_name
                                    history['subpath'] = subpath_checker(subpath)
                                    session.add(Histories.model_validate(history))
                            continue

                        p = os.path.join(root, dir, file)
                        if Path(p).is_file():
                            if 'attachments' in p:
                                with open(p) as attachment_file:
                                    _attachment = json.load(attachment_file)
                                _attachment['space_name'] = space_name
                                _attachment['uuid'] = _attachment.get('uuid', uuid4())
                                _attachment['subpath'] = subpath_checker(_attachment['subpath'])
                                session.add(Attachments.model_validate(_attachment))
                            elif file.endswith('.json'):
                                with open(p) as entry_file:
                                    entry = json.load(entry_file)
                                entry['space_name'] = space_name
                                entry['subpath'] = subpath_checker(subpath)
                                session.add(Entries.model_validate(entry))
            session.commit()
        assert True  # Assert that the migration completes without error
    except Exception as e:
        print(f"Migration failed: {e}")
        assert False, f"Migration failed: {e}"  # Fail with the real cause
    finally:
        # Clean up the mock directory structure.  BUG FIX: run in `finally`
        # so a failed migration doesn't leave stale files for the next run.
        os.remove('/tmp/test_space/.dm/meta.space.json')
        os.remove('/tmp/test_space/dir1/history.jsonl')
        os.remove('/tmp/test_space/dir1/attachments/meta.attachments.json')
        os.remove('/tmp/test_space/dir1/meta.ticket.json')
        os.remove('/tmp/test_space/.dm/meta.user.json')
        os.remove('/tmp/test_space/.dm/meta.role.json')
        os.remove('/tmp/test_space/.dm/meta.permission.json')
        os.rmdir('/tmp/test_space/dir1/attachments')
        os.rmdir('/tmp/test_space/.dm')
        os.rmdir('/tmp/test_space/dir1')
        os.rmdir('/tmp/test_space')
|
pytests/service_test.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from api.user.service import gen_alphanumeric
|
|
2
|
+
import pytest
|
|
3
|
+
|
|
4
|
+
@pytest.mark.run(order=9)
def test_gen_alphanumeric_length():
    """gen_alphanumeric honours both the default and an explicit length."""
    assert len(gen_alphanumeric()) == 16, "Default length should be 16"

    custom_length = 32
    generated = gen_alphanumeric(custom_length)
    assert len(generated) == custom_length, f"Length should be {custom_length}"
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@pytest.mark.run(order=9)
def test_gen_alphanumeric_characters():
    """Every character produced must be alphanumeric."""
    for ch in gen_alphanumeric():
        assert ch.isalnum(), "Result should only contain alphanumeric characters"
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@pytest.mark.run(order=9)
def test_gen_alphanumeric_unique():
    """100 consecutive values should contain no duplicates."""
    sample_count = 100
    generated = set()
    for _ in range(sample_count):
        generated.add(gen_alphanumeric())
    assert len(generated) == sample_count, "Generated strings should be unique"
|
pytests/test_info.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
from httpx import AsyncClient
|
|
2
|
+
from pytests.base_test import get_superman_cookie
|
|
3
|
+
from fastapi import status
|
|
4
|
+
import pytest
|
|
5
|
+
from utils.internal_error_code import InternalErrorCode
|
|
6
|
+
from utils.jwt import sign_jwt
|
|
7
|
+
from utils.settings import settings
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@pytest.mark.run(order=6)
@pytest.mark.anyio
async def test_info_me(client: AsyncClient) -> None:
    """/info/me answers 200 with a success payload for the superman user."""
    client.cookies.set("auth_token", await get_superman_cookie(client))

    response = await client.get("/info/me")

    assert response.status_code == status.HTTP_200_OK
    payload = response.json()
    assert payload["status"] == "success"
|
|
20
|
+
|
|
21
|
+
@pytest.mark.run(order=6)
@pytest.mark.anyio
async def test_info_manifest(client: AsyncClient) -> None:
    """/info/manifest answers 200 with a success payload."""
    client.cookies.set("auth_token", await get_superman_cookie(client))

    response = await client.get("/info/manifest")

    assert response.status_code == status.HTTP_200_OK
    payload = response.json()
    assert payload["status"] == "success"
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
@pytest.mark.run(order=6)
@pytest.mark.anyio
async def test_get_settings_should_pass(client: AsyncClient) -> None:
    """An authenticated superman user can read /info/settings."""
    client.cookies.set("auth_token", await get_superman_cookie(client))

    response = await client.get("/info/settings")

    assert response.status_code == status.HTTP_200_OK
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
# @pytest.mark.run(order=6)
|
|
41
|
+
# @pytest.mark.anyio
|
|
42
|
+
# async def test_in_loop_tasks(client: AsyncClient) -> None:
|
|
43
|
+
# client.cookies.set("auth_token", await get_superman_cookie(client))
|
|
44
|
+
# response = await client.get("/info/in-loop-tasks")
|
|
45
|
+
# assert response.status_code == status.HTTP_200_OK
|
|
46
|
+
# json_response = response.json()
|
|
47
|
+
# assert json_response["status"] == "success"
|
|
48
|
+
# assert "tasks_count" in json_response["attributes"]
|
|
49
|
+
# assert isinstance(json_response["attributes"]["tasks_count"], int)
|
|
50
|
+
# assert "tasks" in json_response["attributes"]
|
|
51
|
+
# assert isinstance(json_response["attributes"]["tasks"], list)
|
|
52
|
+
# for task in json_response["attributes"]["tasks"]:
|
|
53
|
+
# assert "name" in task
|
|
54
|
+
# assert "coroutine" in task
|
|
55
|
+
# assert "stack" in task
|
pytests/test_status.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
|
|
2
|
+
# from pytests.base_test import client
|
|
3
|
+
from fastapi import status
|
|
4
|
+
from httpx import AsyncClient
|
|
5
|
+
import pytest
|
|
6
|
+
|
|
7
|
+
# @pytest.mark.asyncio(scope="session")
|
|
8
|
+
@pytest.mark.anyio
async def test_sanity(client: AsyncClient) -> None:
    """The root endpoint responds 200 with status == success."""
    response = await client.get("/")

    assert response.status_code == status.HTTP_200_OK
    payload = response.json()
    assert payload["status"] == "success"
|
|
15
|
+
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
import asyncio
|
|
3
|
+
from importlib.util import find_spec, module_from_spec
|
|
4
|
+
import sys
|
|
5
|
+
|
|
6
|
+
from models.core import Content, Event
|
|
7
|
+
from models.enums import ActionType
|
|
8
|
+
from data_adapters.adapter import data_adapter as db
|
|
9
|
+
from utils.settings import settings
|
|
10
|
+
|
|
11
|
+
# Location of project-specific plugins inside the configured spaces folder.
CUSTOM_PLUGINS_PATH = settings.spaces_folder / "custom_plugins"

# Allow python to search for modules inside the custom plugins
# by including the path to the parent folder of the custom plugins in sys.path
back_out_of_project = 2
back_to_spaces = 0

# Count the ".." components in the configured spaces folder: each one moves
# the custom plugins one directory level further away from this file.
for part in CUSTOM_PLUGINS_PATH.parts:
    if part == "..":
        back_to_spaces += 1

# NOTE(review): presumably compensates for the extra directory level when
# running from a compiled .pyc — confirm against the deployment layout.
if __file__.endswith(".pyc"):
    back_out_of_project += 1

sys.path.append(
    "/".join(__file__.split("/")[:-(back_out_of_project+back_to_spaces)]) +
    "/" +
    "/".join(CUSTOM_PLUGINS_PATH.parts[back_to_spaces:-1])
)
|
|
30
|
+
|
|
31
|
+
def load_notification_plugin():
    """Locate, import and instantiate the custom ``send_notification`` plugin.

    Returns:
        An instance of the plugin module's ``Plugin`` class, or None when
        the plugin directory is missing its config.json / plugin.py or
        when the module spec cannot be resolved.
    """
    # Load the plugin module
    plugin_path = CUSTOM_PLUGINS_PATH / 'send_notification'

    config_file_path = plugin_path / 'config.json'
    plugin_file_path = plugin_path / 'plugin.py'
    if (
        not config_file_path.is_file() or
        not plugin_file_path.is_file()
    ):
        return None

    module_name = f"{CUSTOM_PLUGINS_PATH.parts[-1]}.send_notification.plugin"
    spec = find_spec(module_name)
    if not spec:
        return None
    module = module_from_spec(spec)
    sys.modules[module_name] = module
    # BUG FIX: module_from_spec() only creates an empty module object; the
    # module body must actually be executed before its attributes (such as
    # the Plugin class) exist.
    spec.loader.exec_module(module)
    return getattr(module, "Plugin")()
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
async def main(space, subpath, shortname):
    """Load a notification entry and run the send_notification plugin on it."""
    notification_payload = db.load_resource_payload(
        space_name=space,
        subpath=subpath,
        class_type=Content,
        filename=f"{shortname}.json",
    )
    if not notification_payload:
        print("The notification entry is not found")
        return

    # Synthesize a "create" event carrying the payload, as the plugin hook expects.
    event_data = Event(
        space_name=space,
        subpath=subpath,
        shortname=shortname,
        action_type=ActionType.create,
        attributes={"payload": {"body": notification_payload}},
        user_shortname="__SYSTEM__",
    )

    plugin_obj = load_notification_plugin()
    if not plugin_obj:
        print("The plugin is not found")
        return

    await plugin_obj.hook(event_data)
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
if __name__ == "__main__":
    # CLI wrapper: parse the target space / subpath / shortname and run the
    # async entry point once.
    parser = argparse.ArgumentParser(
        description="Execute the custom_plugins/send_notification plugin",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument("--space", help="The space where the notification request and user records are stored")
    parser.add_argument("--notification-subpath", help="The subpath of the notification request")
    parser.add_argument("--notification-shortname", help="The shortname of the notification request")

    args = parser.parse_args()

    asyncio.run(main(args.space, args.notification_subpath, args.notification_shortname))
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
#!/usr/bin/env -S BACKEND_ENV=config.env python3
|
|
2
|
+
from datetime import datetime, timedelta
|
|
3
|
+
|
|
4
|
+
from models.api import Query
|
|
5
|
+
from models.core import Content, Notification, NotificationData, Translation
|
|
6
|
+
from data_adapters.adapter import data_adapter as db
|
|
7
|
+
from utils.notification import NotificationManager
|
|
8
|
+
from utils.settings import settings
|
|
9
|
+
from fastapi.logger import logger
|
|
10
|
+
import asyncio
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
async def trigger_admin_notifications() -> None:
    """Send all admin notification requests scheduled in the last 15 minutes.

    Queries the management space for unfinished ``admin_notification_request``
    entries whose ``scheduled_at`` falls in the [now-15min, now] window,
    resolves each request's receiver users (explicit msisdn list or a custom
    search string), pushes the notification on every configured platform,
    and finally marks the request entry as ``finished``.  Failures for one
    request are logged and do not abort the remaining receivers of that
    request.
    """
    # Millisecond timestamps bounding the sweep window.
    from_time = int((datetime.now() - timedelta(minutes=15)).timestamp() * 1000)
    to_time = int(datetime.now().timestamp() * 1000)
    total, admin_notifications = await db.query(Query(
        space_name=settings.management_space, schema_name="admin_notification_request",
        search=f"@subpath:/notifications/admin (-@status:finished) @scheduled_at:[{from_time} {to_time}]",
        filters={}, limit=10000, offset=0)
    )

    if total == 0:
        return

    notification_manager = NotificationManager()
    for notification in admin_notifications:
        notification_dict = notification.model_dump()
        # BUG FIX: prepare the request once; the original code awaited
        # prepare_request a second time below with identical input.
        formatted_req = await prepare_request(notification_dict)

        # Get notification receivers users
        search_criteria = notification_dict.get('msisdns_search_string')
        if not search_criteria:
            search_criteria = '@msisdn:' + '|'.join(notification_dict.get('msisdns', ""))

        total, receivers = await db.query(Query(
            space_name=settings.management_space, schema_name="user",
            search=f"@subpath:users {search_criteria}",
            filters={}, limit=10000, offset=0)
        )

        if total == 0:
            continue

        # Try to send the notification
        # and update the notification status to finished
        try:
            for receiver_data in receivers:
                if not formatted_req["push_only"]:
                    # Persist an in-app notification under the receiver's
                    # personal space as well.
                    notification_obj = await Notification.from_request(
                        notification_dict
                    )
                    await db.internal_save_model(
                        space_name="personal",
                        subpath=f"people/{receiver_data.shortname}/notifications",
                        meta=notification_obj,
                    )

                for platform in formatted_req["platforms"]:
                    await notification_manager.send(
                        platform=platform,
                        data=NotificationData(
                            receiver=receiver_data.to_dict(),
                            title=formatted_req["title"],
                            body=formatted_req["body"],
                            image_urls=formatted_req["images_urls"],
                        ),
                    )

            notification_meta = await db.load_or_none(
                settings.management_space,
                notification_dict["subpath"],
                notification_dict["shortname"],
                Content,
                notification_dict["owner_shortname"],
            )

            if notification_meta is None:
                # NOTE(review): this aborts the whole sweep, skipping any
                # remaining notifications; `continue` may be the intent —
                # behavior kept as-is pending confirmation.
                return

            await db.internal_sys_update_model(
                settings.management_space,
                notification_dict["subpath"],
                notification_meta,
                {"status": "finished"},
            )
        except Exception as e:
            # Best-effort per request: log and move on to the next one.
            logger.error(
                f"Error at sending/updating admin based notification: {e.args}"
            )
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
async def prepare_request(notification_dict) -> dict:
    """Build the send parameters for one admin notification request.

    Resolves the request's attached media per language and wraps the
    displayname/description/images into Translation objects alongside
    the target platforms and the push_only flag.
    """
    subpath = notification_dict['subpath']
    shortname = notification_dict['shortname']

    # Get Notification Request Images
    attachments_path = (
        settings.spaces_folder
        / f"{settings.management_space}/{subpath}/.dm/{shortname}"
    )
    notification_attachments = await db.get_entry_attachments(
        subpath=f"{subpath}/{shortname}",
        attachments_path=attachments_path,
    )
    media = notification_attachments.get("media", {})
    notification_images = {lang: media.get(lang) for lang in ("en", "ar", "ku")}

    return {
        "platforms": notification_dict["types"],
        "title": Translation(**notification_dict["displayname"]),
        "body": Translation(**notification_dict["description"]),
        "images_urls": Translation(**notification_images),
        "push_only": notification_dict.get("push_only", False),
    }
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
# Script entry point: run the 15-minute admin notification sweep once.
if __name__ == "__main__":
    asyncio.run(trigger_admin_notifications())
|