dmart-0.1.9-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alembic/__init__.py +0 -0
- alembic/env.py +91 -0
- alembic/scripts/__init__.py +0 -0
- alembic/scripts/calculate_checksums.py +77 -0
- alembic/scripts/migration_f7a4949eed19.py +28 -0
- alembic/versions/0f3d2b1a7c21_add_authz_materialized_views.py +87 -0
- alembic/versions/10d2041b94d4_last_checksum_history.py +62 -0
- alembic/versions/1cf4e1ee3cb8_ext_permission_with_filter_fields_values.py +33 -0
- alembic/versions/26bfe19b49d4_rm_failedloginattempts.py +42 -0
- alembic/versions/3c8bca2219cc_add_otp_table.py +38 -0
- alembic/versions/6675fd9dfe42_remove_unique_from_sessions_table.py +36 -0
- alembic/versions/71bc1df82e6a_adding_user_last_login_at.py +43 -0
- alembic/versions/74288ccbd3b5_initial.py +264 -0
- alembic/versions/7520a89a8467_rm_activesession_table.py +39 -0
- alembic/versions/848b623755a4_make_created_nd_updated_at_required.py +138 -0
- alembic/versions/8640dcbebf85_add_notes_to_users.py +32 -0
- alembic/versions/91c94250232a_adding_fk_on_owner_shortname.py +104 -0
- alembic/versions/98ecd6f56f9a_ext_meta_with_owner_group_shortname.py +66 -0
- alembic/versions/9aae9138c4ef_indexing_created_at_updated_at.py +80 -0
- alembic/versions/__init__.py +0 -0
- alembic/versions/b53f916b3f6d_json_to_jsonb.py +492 -0
- alembic/versions/eb5f1ec65156_adding_user_locked_to_device.py +36 -0
- alembic/versions/f7a4949eed19_adding_query_policies_to_meta.py +60 -0
- api/__init__.py +0 -0
- api/info/__init__.py +0 -0
- api/info/router.py +109 -0
- api/managed/__init__.py +0 -0
- api/managed/router.py +1541 -0
- api/managed/utils.py +1850 -0
- api/public/__init__.py +0 -0
- api/public/router.py +758 -0
- api/qr/__init__.py +0 -0
- api/qr/router.py +108 -0
- api/user/__init__.py +0 -0
- api/user/model/__init__.py +0 -0
- api/user/model/errors.py +14 -0
- api/user/model/requests.py +165 -0
- api/user/model/responses.py +11 -0
- api/user/router.py +1401 -0
- api/user/service.py +270 -0
- bundler.py +44 -0
- config/__init__.py +0 -0
- config/channels.json +11 -0
- config/notification.json +17 -0
- data_adapters/__init__.py +0 -0
- data_adapters/adapter.py +16 -0
- data_adapters/base_data_adapter.py +467 -0
- data_adapters/file/__init__.py +0 -0
- data_adapters/file/adapter.py +2043 -0
- data_adapters/file/adapter_helpers.py +1013 -0
- data_adapters/file/archive.py +150 -0
- data_adapters/file/create_index.py +331 -0
- data_adapters/file/create_users_folders.py +52 -0
- data_adapters/file/custom_validations.py +68 -0
- data_adapters/file/drop_index.py +40 -0
- data_adapters/file/health_check.py +560 -0
- data_adapters/file/redis_services.py +1110 -0
- data_adapters/helpers.py +27 -0
- data_adapters/sql/__init__.py +0 -0
- data_adapters/sql/adapter.py +3210 -0
- data_adapters/sql/adapter_helpers.py +491 -0
- data_adapters/sql/create_tables.py +451 -0
- data_adapters/sql/create_users_folders.py +53 -0
- data_adapters/sql/db_to_json_migration.py +482 -0
- data_adapters/sql/health_check_sql.py +232 -0
- data_adapters/sql/json_to_db_migration.py +454 -0
- data_adapters/sql/update_query_policies.py +101 -0
- data_generator.py +81 -0
- dmart-0.1.9.dist-info/METADATA +64 -0
- dmart-0.1.9.dist-info/RECORD +149 -0
- dmart-0.1.9.dist-info/WHEEL +5 -0
- dmart-0.1.9.dist-info/entry_points.txt +2 -0
- dmart-0.1.9.dist-info/top_level.txt +23 -0
- dmart.py +513 -0
- get_settings.py +7 -0
- languages/__init__.py +0 -0
- languages/arabic.json +15 -0
- languages/english.json +16 -0
- languages/kurdish.json +14 -0
- languages/loader.py +13 -0
- main.py +506 -0
- migrate.py +24 -0
- models/__init__.py +0 -0
- models/api.py +203 -0
- models/core.py +597 -0
- models/enums.py +255 -0
- password_gen.py +8 -0
- plugins/__init__.py +0 -0
- plugins/action_log/__init__.py +0 -0
- plugins/action_log/plugin.py +121 -0
- plugins/admin_notification_sender/__init__.py +0 -0
- plugins/admin_notification_sender/plugin.py +124 -0
- plugins/ldap_manager/__init__.py +0 -0
- plugins/ldap_manager/plugin.py +100 -0
- plugins/local_notification/__init__.py +0 -0
- plugins/local_notification/plugin.py +123 -0
- plugins/realtime_updates_notifier/__init__.py +0 -0
- plugins/realtime_updates_notifier/plugin.py +58 -0
- plugins/redis_db_update/__init__.py +0 -0
- plugins/redis_db_update/plugin.py +188 -0
- plugins/resource_folders_creation/__init__.py +0 -0
- plugins/resource_folders_creation/plugin.py +81 -0
- plugins/system_notification_sender/__init__.py +0 -0
- plugins/system_notification_sender/plugin.py +188 -0
- plugins/update_access_controls/__init__.py +0 -0
- plugins/update_access_controls/plugin.py +9 -0
- pytests/__init__.py +0 -0
- pytests/api_user_models_erros_test.py +16 -0
- pytests/api_user_models_requests_test.py +98 -0
- pytests/archive_test.py +72 -0
- pytests/base_test.py +300 -0
- pytests/get_settings_test.py +14 -0
- pytests/json_to_db_migration_test.py +237 -0
- pytests/service_test.py +26 -0
- pytests/test_info.py +55 -0
- pytests/test_status.py +15 -0
- run_notification_campaign.py +98 -0
- scheduled_notification_handler.py +121 -0
- schema_migration.py +208 -0
- schema_modulate.py +192 -0
- set_admin_passwd.py +55 -0
- sync.py +202 -0
- utils/__init__.py +0 -0
- utils/access_control.py +306 -0
- utils/async_request.py +8 -0
- utils/exporter.py +309 -0
- utils/firebase_notifier.py +57 -0
- utils/generate_email.py +38 -0
- utils/helpers.py +352 -0
- utils/hypercorn_config.py +12 -0
- utils/internal_error_code.py +60 -0
- utils/jwt.py +124 -0
- utils/logger.py +167 -0
- utils/middleware.py +99 -0
- utils/notification.py +75 -0
- utils/password_hashing.py +16 -0
- utils/plugin_manager.py +215 -0
- utils/query_policies_helper.py +112 -0
- utils/regex.py +44 -0
- utils/repository.py +529 -0
- utils/router_helper.py +19 -0
- utils/settings.py +165 -0
- utils/sms_notifier.py +21 -0
- utils/social_sso.py +67 -0
- utils/templates/activation.html.j2 +26 -0
- utils/templates/reminder.html.j2 +17 -0
- utils/ticket_sys_utils.py +203 -0
- utils/web_notifier.py +29 -0
- websocket.py +231 -0
utils/generate_email.py
ADDED
@@ -0,0 +1,38 @@
+import os
+from jinja2 import Environment, FileSystemLoader
+from pathlib import Path
+
+
+def generate_email_from_template(template, data):
+    templates_dir = Path(__file__).resolve().parent / "templates"
+    environment = Environment(
+        loader=FileSystemLoader(str(templates_dir))
+    )
+    match template:
+        case "activation":
+            template = environment.get_template("activation.html.j2")
+            return template.render(
+                name=data.get("name"),
+                msisdn=data.get("msisdn"),
+                shortname=data.get("shortname"),
+                link=data.get("link"),
+            )
+
+        case "reminder":
+            template = environment.get_template("reminder.html.j2")
+            return template.render(
+                name=data.get("name"),
+                link=data.get("link"),
+            )
+        case _:
+            return ""
+
+
+def generate_subject(template):
+    match template:
+        case "activation":
+            return "Welcome to our Platform!"
+        case "reminder":
+            return "[Reminder] Activate Your Account"
+        case _:
+            return ""
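A minimal usage sketch of the two helpers above; the template names and data keys come straight from the diff, while the sample values and the calling context are made up for illustration:

# Illustrative sketch only: the recipient values below are hypothetical.
# generate_email_from_template() resolves its "templates" directory relative to
# this module, so nothing beyond the installed package is required.
from utils.generate_email import generate_email_from_template, generate_subject

html_body = generate_email_from_template(
    "activation",
    {
        "name": "Jane Doe",                      # hypothetical recipient name
        "msisdn": "9647700000000",               # hypothetical phone number
        "shortname": "jane",
        "link": "https://example.com/activate",  # hypothetical activation link
    },
)
subject = generate_subject("activation")         # -> "Welcome to our Platform!"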
utils/helpers.py
ADDED
@@ -0,0 +1,352 @@
+from copy import deepcopy
+import csv
+from datetime import datetime
+import json
+from pathlib import Path
+from re import sub as re_sub
+from uuid import UUID
+
+import aiofiles
+from jsonschema.validators import _RefResolver as RefResolver  # type: ignore
+
+# TBD from referencing import Registry, Resource
+# TBD import referencing.jsonschema
+from collections.abc import MutableMapping
+from models.enums import Language
+from typing import Any
+from languages.loader import languages
+
+
+def flatten_all(d: MutableMapping, parent_key: str = "", sep: str = ".") -> dict:
+    items: list = []
+    for k, v in d.items():
+        new_key = parent_key + sep + k if parent_key else k
+        if isinstance(v, MutableMapping):
+            items.extend(flatten_all(v, new_key, sep=sep).items())
+        elif isinstance(v, list):
+            items.extend(flatten_all(flatten_list(v), new_key, sep=sep).items())
+        else:
+            items.append((new_key, v))
+    return dict(items)
+
+
+def flatten_dict(d, parent_key='', sep='.'):
+    items = []
+    for k, v in d.items():
+        new_key = f"{parent_key}{sep}{k}" if parent_key else k
+        if isinstance(v, dict):
+            items.extend(flatten_dict(v, new_key, sep=sep).items())
+        else:
+            items.append((new_key, v))
+    return dict(items)
+
+
+def flatten_list(ll: list, key: str | None = None):
+    flattened = {}
+    for idx, item in enumerate(ll):
+        flattened[f"{key}.{idx}" if key else f"{idx}"] = item
+    return flattened
+
+
+def arr_remove_common(arr1: list, arr2: list):
+    for i1 in arr1[:]:
+        if i1 in arr2:
+            arr1.remove(i1)
+            arr2.remove(i1)
+
+    return arr1, arr2
+
+
+def get_removed_items(arr1: list, arr2: list):
+    removed_items = []
+
+    for i1 in arr1:
+        if i1 not in arr2:
+            removed_items.append(i1)
+
+    return removed_items
+
+
+def flatten_list_of_dicts_in_dict(d: dict) -> dict:
+    """
+    example:
+    d = {
+        "key": [
+            {
+                'imsi': '12345',
+                'name': 'Saad Adel'
+            },
+            {
+                'imsi': '556566',
+                'name': 'Saad Adel'
+            }
+        ],
+        "another": "s",
+        "another2": 222,
+    }
+    return {
+        "key.imsi": ['12345', '556566'],
+        "key.name": ['Saad Adel', 'Saad Adel'],
+        "another": "s",
+        "another2": 222,
+    }
+    """
+    flattened_d = deepcopy(d)
+    for parent_key, list_of_dict in d.items():
+        if (
+            isinstance(list_of_dict, list)
+            and len(list_of_dict) > 0
+            and isinstance(list_of_dict[0], dict)
+        ):
+            flattened: dict = {}
+            for dict_item in list_of_dict:
+                for key, value in dict_item.items():
+                    flattened.setdefault(f"{parent_key}.{key}", [])
+                    flattened[f"{parent_key}.{key}"].append(value)
+            flattened_d.pop(parent_key)
+            flattened_d.update(flattened)
+
+    return flattened_d
+
+
+def resolve_schema_references(schema: dict, refs: dict = {}) -> dict:
+    """Resolves and replaces json-schema $refs with the appropriate dict.
+
+    Recursively walks the given schema dict, converting every instance
+    of $ref in a 'properties' structure with a resolved dict.
+
+    This modifies the input schema and also returns it.
+
+    Arguments:
+        schema:
+            the schema dict
+        refs:
+            a dict of <string, dict> which forms a store of referenced schemata
+
+    Returns:
+        schema
+    """
+    refs = refs or {}
+    resolved_schema = _resolve_schema_references(
+        schema, RefResolver("", schema, store=refs)
+    )
+    if "definitions" in resolved_schema:
+        resolved_schema.pop("definitions")
+    return resolved_schema
+
+
+def _resolve_schema_references(schema: dict, resolver) -> dict:
+    if "$ref" in schema:
+        reference_path = schema.pop("$ref", None)
+        resolved = resolver.resolve(reference_path)[1]
+        schema.update(resolved)
+        return _resolve_schema_references(schema, resolver)
+
+    if "properties" in schema:
+        for k, val in schema["properties"].items():
+            schema["properties"][k] = _resolve_schema_references(val, resolver)
+
+    if "patternProperties" in schema:
+        for k, val in schema["patternProperties"].items():
+            schema["patternProperties"][k] = _resolve_schema_references(val, resolver)
+
+    if "items" in schema:
+        schema["items"] = _resolve_schema_references(schema["items"], resolver)
+
+    if "anyOf" in schema:
+        for i, element in enumerate(schema["anyOf"]):
+            schema["anyOf"][i] = _resolve_schema_references(element, resolver)
+
+    if "oneOf" in schema:
+        for i, element in enumerate(schema["oneOf"]):
+            schema["oneOf"][i] = _resolve_schema_references(element, resolver)
+
+    return schema
+
+
+def camel_case(snake_str):
+    words = snake_str.split("_")
+    return "".join(word.title() for word in words)
+
+
+def snake_case(camel_str):
+    return re_sub(r"(?<!^)(?=[A-Z])", "_", camel_str).lower()
+
+
+def divide_chunks(lll, n):
+    """
+    Yield successive n-sized chunks from lll.
+    """
+
+    # looping till length l
+    for i in range(0, len(lll), n):
+        yield lll[i : i + n]
+
+
+def remove_none_dict(target: dict[str, Any]) -> dict[str, Any]:
+    new_d: dict = {}
+    for key, val in target.items():
+        if val is None:
+            continue
+
+        if isinstance(val, dict):
+            new_d[key] = remove_none_dict(val)
+        elif isinstance(val, list):
+            new_d[key] = remove_none_list(val)
+        else:
+            new_d[key] = val
+
+    return new_d
+
+def remove_none_list(target: list):
+    new_l: list = []
+    for val in target:
+        if val is None:
+            continue
+
+        if isinstance(val, dict):
+            new_l.append(remove_none_dict(val))
+        elif isinstance(val, list):
+            new_l.append(remove_none_list(val))
+        else:
+            new_l.append(val)
+
+    return new_l
+
+
+def alter_dict_keys(
+    target: dict,
+    include: list | None = None,
+    exclude: list | None = None,
+    parents: str = "",
+):
+    result: dict = {}
+    for k in list(target):
+        search_for = f"{parents}.{k}" if parents else f"{k}"
+        if isinstance(target[k], dict):
+            if include and search_for in include:
+                result[k] = target[k]
+                continue
+            if exclude and search_for in exclude:
+                continue
+            result[k] = alter_dict_keys(
+                target[k], include, exclude, search_for if parents else f"{k}"
+            )
+
+        elif (include and search_for not in include) or (
+            exclude and search_for in exclude
+        ):
+            continue
+
+        else:
+            result[k] = target[k]
+
+    return result
+
+
+def json_flater(data: dict[str, Any]) -> dict[str, Any]:
+    flatened_data = {}
+    for k, v in data.items():
+        if isinstance(v, dict):
+            __flatened_data = json_flater(v)
+            _flatened_data = {
+                key: val for key, val in __flatened_data.items()
+            }  # deep copy to resolve the runtime error
+            _keys = list(_flatened_data.keys())
+            for key in _keys:
+                flatened_data[f"{k}.{key}"] = _flatened_data[key]
+                if k in flatened_data and key in flatened_data[k]:
+                    del flatened_data[k][key]
+        else:
+            flatened_data = {**flatened_data, k: v}
+    return flatened_data
+
+
+def lang_code(lang: Language):
+    match lang:
+        case Language.ar:
+            return "ar"
+        case Language.en:
+            return "en"
+        case Language.ku:
+            return "ku"
+        case Language.fr:
+            return "fr"
+        case Language.tr:
+            return "tr"
+
+
+def replace_message_vars(message: str, dest_data: dict, locale: str):
+    dest_data_dict = flatten_dict(dest_data)
+    for field, value in dest_data_dict.items():
+        if isinstance(value, dict) and locale in value:
+            value = value[locale]
+        if field in ["created_at", "updated_at"]:
+            message = message.replace(
+                f"{{{field}}}",
+                datetime.strptime(str(value), "%Y-%m-%d %H:%M:%S.%f").strftime(
+                    "%Y-%m-%d %H:%M:%S"
+                ),
+            )
+        else:
+            message = message.replace(
+                f"{{{field}}}", languages[Language[locale]].get(str(value), str(value))
+            )
+
+    return re_sub(r"\{\w*.*\}", "", message)
+
+
+def str_to_datetime(str: str, format: str = "%Y-%m-%dT%H:%M:%S.%f"):
+    return datetime.strptime(str, format)
+
+
+def pp(*args, **kwargs):
+    """
+    Pretty Print
+    """
+    print_str = "\n\n================== DUMP DATA ==================\n"
+    if args:
+        for arg in args:
+            print_str += f"\n\narg: {arg}"
+
+    if kwargs:
+        for k, v in kwargs.items():
+            print_str += f"\n\n{k}: {v}"
+
+    print_str += "\n\n_____________________END________________________\n\n"
+    print(print_str)
+
+
+async def csv_file_to_json(csv_file_path: Path) -> list[dict[str, Any]]:
+    data: list[dict[str, Any]] = []
+
+    async with aiofiles.open(
+        csv_file_path, mode="r", encoding="utf-8", newline=""
+    ) as csvf:
+        contents = await csvf.readlines()
+        csvReader = csv.DictReader(contents)
+
+        for row in csvReader:
+            data.append(row)
+
+    return data
+
+def read_jsonl_file(file_path):
+    data = []
+    with open(file_path, 'r') as file:
+        for line in file:
+            data.append(json.loads(line))
+    return data
+
+
+def jq_dict_parser(data):
+    if isinstance(data, dict):
+        return {k: jq_dict_parser(v) for k, v in data.items()}
+    elif isinstance(data, list):
+        return [jq_dict_parser(item) for item in data]
+    elif isinstance(data, UUID):
+        return str(data)
+    elif isinstance(data, datetime):
+        return str(data)
+    else:
+        return data
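A quick sketch of what a few of the helpers above return; the inputs are made-up examples and the expected outputs follow directly from the function bodies:

# Illustrative sketch only; inputs are arbitrary examples.
from utils.helpers import flatten_dict, remove_none_dict, camel_case, snake_case

flatten_dict({"a": {"b": 1, "c": {"d": 2}}})             # {"a.b": 1, "a.c.d": 2}
remove_none_dict({"x": None, "y": {"z": None, "w": 3}})  # {"y": {"w": 3}}
camel_case("display_name")                               # "DisplayName"
snake_case("DisplayName")                                # "display_name"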
utils/hypercorn_config.py
ADDED
@@ -0,0 +1,12 @@
+from utils.settings import settings
+from os import cpu_count
+from fastapi.logger import logger
+from utils.logger import logging_schema
+
+
+bind = [f"{settings.listening_host}:{settings.listening_port}"]
+workers = cpu_count()
+backlog = 200
+worker_class = "asyncio"
+logconfig_dict = logging_schema
+errorlog = logger
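This module is a Hypercorn configuration file assembled from the package settings. A rough programmatic equivalent, shown only to make the knob names concrete; the host, port, and worker count below are placeholders rather than the values utils.settings would actually produce:

# Illustrative sketch only: mirrors the module-level names above using
# Hypercorn's Config object; the concrete values are placeholders.
from hypercorn.config import Config

config = Config()
config.bind = ["127.0.0.1:8080"]  # settings.listening_host:settings.listening_port in the real config
config.workers = 4                # cpu_count() in the real config
config.backlog = 200
config.worker_class = "asyncio"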
utils/internal_error_code.py
ADDED
@@ -0,0 +1,60 @@
+class InternalErrorCode:
+    NOT_ALLOWED = 401
+    UNPROCESSABLE_ENTITY = 424
+    INVALID_IDENTIFIER = 420
+    INVALID_CONFIRMATION = 427
+    SHORTNAME_ALREADY_EXIST = 400
+    SHORTNAME_DOES_NOT_EXIST = 404
+    INVALID_DATA = 402
+    SOMETHING_WRONG = 430
+    INVALID_HEALTH_CHECK = 403
+    INVALID_APP_KEY = 555
+    INVALID_ROUTE = 230
+    OBJECT_NOT_FOUND = 220
+    INVALID_SPACE_NAME = 203
+    CANNT_DELETE = 204
+    ALREADY_EXIST_SPACE_NAME = 205
+    MISSING_DATA = 202
+    NOT_ALLOWED_LOCATION = 206
+    PROVID_SOURCE_PATH = 222
+    MISSING_DESTINATION_OR_SHORTNAME = 213
+    EMAIL_OR_MSISDN_REQUIRED = 207
+    MISSING_METADATA = 208
+    MISSING_FILTER_SHORTNAMES = 209
+    WORKFLOW_BODY_NOT_FOUND = 218
+    NOT_SUPPORTED_TYPE = 217
+    SOME_SUPPORTED_TYPE = 219
+    TICKET_ALREADY_CLOSED = 299
+    INVALID_TICKET_STATUS = 300
+    DIR_NOT_FOUND = 22
+    LOCK_UNAVAILABLE = 30
+    LOCKED_ENTRY = 31
+    QR_ERROR = 14
+    QR_EXPIRED = 15
+    QR_INVALID = 16
+    INVALID_INVITATION = 125
+    INVALID_PASSWORD_RULES = 17
+    INVALID_USERNAME_AND_PASS = 10
+    USER_ACCOUNT_LOCKED = 110
+    USER_ISNT_VERIFIED = 11
+    PASSWORD_NOT_VALIDATED = 13
+    PASSWORD_RESET_ERROR = 102
+    UNMATCHED_DATA = 19
+    CONFLICT = 409
+    USERNAME_NOT_EXIST = 18
+    OTP_INVALID = 307
+    OTP_EXPIRED = 308
+    OTP_FAILED = 104
+    OTP_ISSUE = 100
+    OTP_NEEDED = 115
+    OTP_RESEND_BLOCKED = 103
+    INVALID_STANDALONE_DATA = 107
+    ONE_ARGUMENT_ALLOWED = 101
+    DATA_SHOULD_BE_UNIQUE = 415
+    INVALID_TOKEN = 47
+    EXPIRED_TOKEN = 48
+    NOT_AUTHENTICATED = 49
+    SESSION = 50
+    OBJECT_NOT_SAVED = 51
+    JQ_TIMEOUT = 120
+    JQ_ERROR = 121
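These constants are dmart's application-level error codes, returned alongside the HTTP status code. The pairing is visible in utils/jwt.py below:

# Excerpted from utils/jwt.py later in this diff: an InternalErrorCode travels
# inside an api.Error next to the HTTP status.
from fastapi import status
import models.api as api
from utils.internal_error_code import InternalErrorCode

raise api.Exception(
    status.HTTP_401_UNAUTHORIZED,
    api.Error(type="jwtauth", code=InternalErrorCode.INVALID_TOKEN, message="Invalid Token [1]"),
)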
utils/jwt.py
ADDED
@@ -0,0 +1,124 @@
+from time import time
+from typing import Optional, Any
+
+from fastapi import Request, status
+from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
+
+import jwt
+import models.api as api
+from utils.internal_error_code import InternalErrorCode
+from utils.settings import settings
+from data_adapters.adapter import data_adapter as db
+
+
+def decode_jwt(token: str) -> dict[str, Any]:
+    decoded_token: dict
+    try:
+        decoded_token = jwt.decode(
+            token, settings.jwt_secret, algorithms=[settings.jwt_algorithm]
+        )
+    except Exception:
+        raise api.Exception(
+            status.HTTP_401_UNAUTHORIZED,
+            api.Error(type="jwtauth", code=InternalErrorCode.INVALID_TOKEN, message="Invalid Token [1]"),
+        )
+    if (
+        not decoded_token
+        or "data" not in decoded_token
+        or "expires" not in decoded_token
+    ):
+        raise api.Exception(
+            status.HTTP_401_UNAUTHORIZED,
+            api.Error(type="jwtauth", code=InternalErrorCode.INVALID_TOKEN, message="Invalid Token [2]"),
+        )
+    if decoded_token["expires"] <= time():
+        raise api.Exception(
+            status.HTTP_401_UNAUTHORIZED,
+            api.Error(type="jwtauth", code=InternalErrorCode.EXPIRED_TOKEN, message="Expired Token"),
+        )
+
+    if (
+        isinstance(decoded_token["data"], dict)
+        and decoded_token["data"].get("shortname") is not None
+    ):
+        return decoded_token["data"]
+    else:
+        raise api.Exception(
+            status.HTTP_401_UNAUTHORIZED,
+            api.Error(type="jwtauth", code=InternalErrorCode.INVALID_TOKEN, message="Invalid Token [3]"),
+        )
+
+
+class JWTBearer():
+    is_required: bool = True
+    http_bearer: HTTPBearer
+
+    def __init__(self, auto_error: bool = True, is_required: bool = True):
+        self.http_bearer = HTTPBearer(auto_error=auto_error)
+        self.is_required = is_required
+
+    async def __call__(self, request: Request) -> str | None:
+        user_shortname: str | None = None
+        auth_token: str | None = None
+        try:
+            # Handle token received in Auth header
+            credentials: Optional[HTTPAuthorizationCredentials] = await self.http_bearer.__call__(request)
+            if credentials and credentials.scheme == "Bearer":
+                auth_token = credentials.credentials
+
+        except Exception:
+            # Handle token received in the cookie
+            auth_token = request.cookies.get("auth_token")
+
+        if not auth_token:
+            raise api.Exception(
+                status.HTTP_401_UNAUTHORIZED,
+                api.Error(type="jwtauth", code=InternalErrorCode.NOT_AUTHENTICATED, message="Not authenticated [1]"),
+            )
+
+        decoded = decode_jwt(auth_token)
+        user_shortname = decoded["shortname"]
+        if not user_shortname:
+            raise api.Exception(
+                status.HTTP_401_UNAUTHORIZED,
+                api.Error(type="jwtauth", code=InternalErrorCode.NOT_AUTHENTICATED, message="Not authenticated [2]"),
+            )
+
+        if decoded["type"] != 'bot' and settings.session_inactivity_ttl:
+            _, user_session_token = await db.get_user_session(user_shortname, auth_token)
+            if not isinstance(user_session_token, str):
+                raise api.Exception(
+                    status.HTTP_401_UNAUTHORIZED,
+                    api.Error(
+                        type="jwtauth", code=InternalErrorCode.NOT_AUTHENTICATED, message="Not authenticated [3]"
+                    ),
+                )
+
+        return user_shortname
+
+class GetJWTToken:
+    http_bearer: HTTPBearer
+    def __init__(self, auto_error: bool = True):
+        self.http_bearer = HTTPBearer(auto_error=auto_error)
+
+    async def __call__(self, request: Request) -> str | None:
+        try:
+            credentials: Optional[HTTPAuthorizationCredentials] = await self.http_bearer.__call__(request)
+            if credentials and credentials.scheme == "Bearer":
+                return credentials.credentials
+        except Exception:
+            return request.cookies.get("auth_token")
+        return None
+
+
+def generate_jwt(data: dict, expires: int = 86400) -> str:
+    payload = {"data": data, "expires": time() + expires}
+    return jwt.encode(payload, settings.jwt_secret, algorithm=settings.jwt_algorithm)
+
+
+async def sign_jwt(data: dict, expires: int = 86400) -> str:
+    token = generate_jwt(data, expires)
+    if data["type"] != "bot" and settings.session_inactivity_ttl:
+        await db.set_user_session(data["shortname"], token)
+    return token
+
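To tie the pieces together, a minimal sketch of issuing and checking a token with the helpers above; the payload keys ("shortname" and "type") are the ones sign_jwt() and decode_jwt() rely on, while the sample values are made up:

# Illustrative sketch only. sign_jwt() also persists a session through the data
# adapter when settings.session_inactivity_ttl is set and the type is not "bot";
# using type="bot" here keeps the example free of any backing store.
import asyncio
from utils.jwt import sign_jwt, decode_jwt

async def demo() -> str:
    token = await sign_jwt({"shortname": "alice", "type": "bot"}, expires=3600)
    data = decode_jwt(token)  # -> {"shortname": "alice", "type": "bot"}
    return data["shortname"]

print(asyncio.run(demo()))    # alice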