dmart 0.1.9__py3-none-any.whl
This diff shows the contents of publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
- alembic/__init__.py +0 -0
- alembic/env.py +91 -0
- alembic/scripts/__init__.py +0 -0
- alembic/scripts/calculate_checksums.py +77 -0
- alembic/scripts/migration_f7a4949eed19.py +28 -0
- alembic/versions/0f3d2b1a7c21_add_authz_materialized_views.py +87 -0
- alembic/versions/10d2041b94d4_last_checksum_history.py +62 -0
- alembic/versions/1cf4e1ee3cb8_ext_permission_with_filter_fields_values.py +33 -0
- alembic/versions/26bfe19b49d4_rm_failedloginattempts.py +42 -0
- alembic/versions/3c8bca2219cc_add_otp_table.py +38 -0
- alembic/versions/6675fd9dfe42_remove_unique_from_sessions_table.py +36 -0
- alembic/versions/71bc1df82e6a_adding_user_last_login_at.py +43 -0
- alembic/versions/74288ccbd3b5_initial.py +264 -0
- alembic/versions/7520a89a8467_rm_activesession_table.py +39 -0
- alembic/versions/848b623755a4_make_created_nd_updated_at_required.py +138 -0
- alembic/versions/8640dcbebf85_add_notes_to_users.py +32 -0
- alembic/versions/91c94250232a_adding_fk_on_owner_shortname.py +104 -0
- alembic/versions/98ecd6f56f9a_ext_meta_with_owner_group_shortname.py +66 -0
- alembic/versions/9aae9138c4ef_indexing_created_at_updated_at.py +80 -0
- alembic/versions/__init__.py +0 -0
- alembic/versions/b53f916b3f6d_json_to_jsonb.py +492 -0
- alembic/versions/eb5f1ec65156_adding_user_locked_to_device.py +36 -0
- alembic/versions/f7a4949eed19_adding_query_policies_to_meta.py +60 -0
- api/__init__.py +0 -0
- api/info/__init__.py +0 -0
- api/info/router.py +109 -0
- api/managed/__init__.py +0 -0
- api/managed/router.py +1541 -0
- api/managed/utils.py +1850 -0
- api/public/__init__.py +0 -0
- api/public/router.py +758 -0
- api/qr/__init__.py +0 -0
- api/qr/router.py +108 -0
- api/user/__init__.py +0 -0
- api/user/model/__init__.py +0 -0
- api/user/model/errors.py +14 -0
- api/user/model/requests.py +165 -0
- api/user/model/responses.py +11 -0
- api/user/router.py +1401 -0
- api/user/service.py +270 -0
- bundler.py +44 -0
- config/__init__.py +0 -0
- config/channels.json +11 -0
- config/notification.json +17 -0
- data_adapters/__init__.py +0 -0
- data_adapters/adapter.py +16 -0
- data_adapters/base_data_adapter.py +467 -0
- data_adapters/file/__init__.py +0 -0
- data_adapters/file/adapter.py +2043 -0
- data_adapters/file/adapter_helpers.py +1013 -0
- data_adapters/file/archive.py +150 -0
- data_adapters/file/create_index.py +331 -0
- data_adapters/file/create_users_folders.py +52 -0
- data_adapters/file/custom_validations.py +68 -0
- data_adapters/file/drop_index.py +40 -0
- data_adapters/file/health_check.py +560 -0
- data_adapters/file/redis_services.py +1110 -0
- data_adapters/helpers.py +27 -0
- data_adapters/sql/__init__.py +0 -0
- data_adapters/sql/adapter.py +3210 -0
- data_adapters/sql/adapter_helpers.py +491 -0
- data_adapters/sql/create_tables.py +451 -0
- data_adapters/sql/create_users_folders.py +53 -0
- data_adapters/sql/db_to_json_migration.py +482 -0
- data_adapters/sql/health_check_sql.py +232 -0
- data_adapters/sql/json_to_db_migration.py +454 -0
- data_adapters/sql/update_query_policies.py +101 -0
- data_generator.py +81 -0
- dmart-0.1.9.dist-info/METADATA +64 -0
- dmart-0.1.9.dist-info/RECORD +149 -0
- dmart-0.1.9.dist-info/WHEEL +5 -0
- dmart-0.1.9.dist-info/entry_points.txt +2 -0
- dmart-0.1.9.dist-info/top_level.txt +23 -0
- dmart.py +513 -0
- get_settings.py +7 -0
- languages/__init__.py +0 -0
- languages/arabic.json +15 -0
- languages/english.json +16 -0
- languages/kurdish.json +14 -0
- languages/loader.py +13 -0
- main.py +506 -0
- migrate.py +24 -0
- models/__init__.py +0 -0
- models/api.py +203 -0
- models/core.py +597 -0
- models/enums.py +255 -0
- password_gen.py +8 -0
- plugins/__init__.py +0 -0
- plugins/action_log/__init__.py +0 -0
- plugins/action_log/plugin.py +121 -0
- plugins/admin_notification_sender/__init__.py +0 -0
- plugins/admin_notification_sender/plugin.py +124 -0
- plugins/ldap_manager/__init__.py +0 -0
- plugins/ldap_manager/plugin.py +100 -0
- plugins/local_notification/__init__.py +0 -0
- plugins/local_notification/plugin.py +123 -0
- plugins/realtime_updates_notifier/__init__.py +0 -0
- plugins/realtime_updates_notifier/plugin.py +58 -0
- plugins/redis_db_update/__init__.py +0 -0
- plugins/redis_db_update/plugin.py +188 -0
- plugins/resource_folders_creation/__init__.py +0 -0
- plugins/resource_folders_creation/plugin.py +81 -0
- plugins/system_notification_sender/__init__.py +0 -0
- plugins/system_notification_sender/plugin.py +188 -0
- plugins/update_access_controls/__init__.py +0 -0
- plugins/update_access_controls/plugin.py +9 -0
- pytests/__init__.py +0 -0
- pytests/api_user_models_erros_test.py +16 -0
- pytests/api_user_models_requests_test.py +98 -0
- pytests/archive_test.py +72 -0
- pytests/base_test.py +300 -0
- pytests/get_settings_test.py +14 -0
- pytests/json_to_db_migration_test.py +237 -0
- pytests/service_test.py +26 -0
- pytests/test_info.py +55 -0
- pytests/test_status.py +15 -0
- run_notification_campaign.py +98 -0
- scheduled_notification_handler.py +121 -0
- schema_migration.py +208 -0
- schema_modulate.py +192 -0
- set_admin_passwd.py +55 -0
- sync.py +202 -0
- utils/__init__.py +0 -0
- utils/access_control.py +306 -0
- utils/async_request.py +8 -0
- utils/exporter.py +309 -0
- utils/firebase_notifier.py +57 -0
- utils/generate_email.py +38 -0
- utils/helpers.py +352 -0
- utils/hypercorn_config.py +12 -0
- utils/internal_error_code.py +60 -0
- utils/jwt.py +124 -0
- utils/logger.py +167 -0
- utils/middleware.py +99 -0
- utils/notification.py +75 -0
- utils/password_hashing.py +16 -0
- utils/plugin_manager.py +215 -0
- utils/query_policies_helper.py +112 -0
- utils/regex.py +44 -0
- utils/repository.py +529 -0
- utils/router_helper.py +19 -0
- utils/settings.py +165 -0
- utils/sms_notifier.py +21 -0
- utils/social_sso.py +67 -0
- utils/templates/activation.html.j2 +26 -0
- utils/templates/reminder.html.j2 +17 -0
- utils/ticket_sys_utils.py +203 -0
- utils/web_notifier.py +29 -0
- websocket.py +231 -0
data_adapters/sql/adapter_helpers.py (new file)

@@ -0,0 +1,491 @@
import json
import re
import subprocess
from pathlib import Path

import models.api as api
import models.core as core
from models.enums import QueryType
from data_adapters.sql.create_tables import (
    Entries,
    Histories,
    Permissions,
    Roles,
    Users,
    Spaces,
    Aggregated
)
from utils.helpers import (
    str_to_datetime,
)
from utils.settings import settings

postgres_aggregate_functions = [
    "avg",
    "count",
    "max",
    "min",
    "sum",
    "array_agg",
    "string_agg",
    "bool_and",
    "bool_or",
    "bit_and",
    "bit_or",
    "every",
    "json_agg",
    "jsonb_agg",
    "json_object_agg",
    "jsonb_object_agg",
    "mode",
    "regr_avgx",
    "regr_avgy",
    "regr_count",
    "regr_intercept",
    "regr_r2",
    "regr_slope",
    "regr_sxx",
    "regr_sxy",
    "regr_syy",
    "corr",
    "covar_pop",
    "covar_samp",
    "stddev",
    "stddev_pop",
    "stddev_samp",
    "variance",
    "var_pop",
    "var_samp",
]

mysql_aggregate_functions = [
    "avg",
    "count",
    "max",
    "min",
    "sum",
    "group_concat",
    "json_arrayagg",
    "json_objectagg",
    "std",
    "stddev",
    "stddev_pop",
    "stddev_samp",
    "variance",
    "var_pop",
    "var_samp",
]

sqlite_aggregate_functions = [
    "avg",
    "count",
    "group_concat",
    "max",
    "min",
    "sum",
    "total",
]

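These three lists read as dialect-specific allowlists of aggregate function names. A minimal sketch of how such an allowlist could be consulted before any SQL is generated (is_allowed_aggregate is hypothetical, not a function shipped in this package):

# Hypothetical helper (not part of dmart): reject aggregation functions
# that are not on the allowlist for the active database dialect.
def is_allowed_aggregate(func_name: str, dialect: str) -> bool:
    allowlists = {
        "postgres": postgres_aggregate_functions,
        "mysql": mysql_aggregate_functions,
        "sqlite": sqlite_aggregate_functions,
    }
    return func_name.lower() in allowlists.get(dialect, [])

assert is_allowed_aggregate("SUM", "sqlite")
assert not is_allowed_aggregate("corr", "sqlite")  # Postgres-only aggregate
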
def subpath_checker(subpath: str):
    if subpath.endswith("/"):
        subpath = subpath[:-1]
    if not subpath.startswith("/"):
        subpath = '/' + subpath
    return subpath

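subpath_checker normalizes subpaths to a canonical form with a leading slash and no trailing slash. Illustrative calls, derived from the code above:

assert subpath_checker("users/") == "/users"
assert subpath_checker("/users") == "/users"
assert subpath_checker("users") == "/users"
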
def transform_keys_to_sql(path):
    parts = path.split('.')
    sql_path = parts[0]
    if len(parts[1:-1]) != 0:
        sql_path += ' -> ' + ' -> '.join([f"'{part}'" for part in parts[1:-1]])
    sql_path += f" ->> '{parts[-1]}'"
    # str.replace returns a new string, so the result must be assigned
    # for the normalization to take effect.
    sql_path = sql_path.replace("-> ->>", "->>")
    return sql_path

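The translation is easiest to see with a concrete dotted path: intermediate keys use the jsonb `->` operator and the final key uses `->>`, which returns text. Illustrative calls, derived from the function above:

assert transform_keys_to_sql("payload.body.user.name") == \
    "payload -> 'body' -> 'user' ->> 'name'"
assert transform_keys_to_sql("payload.body") == "payload ->> 'body'"
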
def validate_search_range(v_str):
    if isinstance(v_str, list):
        return False, v_str

    date_patterns = [
        # Year only: [2025 2024] or [2025,2024]
        r"^\[\d{4}[\s,]\d{4}\]$",
        # Year-month: [2025-04 2025-01] or [2025-04,2025-01]
        r"^\[\d{4}-\d{2}[\s,]\d{4}-\d{2}\]$",
        # Full date: [2025-04-28 2025-01-15] or [2025-04-28,2025-01-15]
        r"^\[\d{4}-\d{2}-\d{2}[\s,]\d{4}-\d{2}-\d{2}\]$",
        # Date with hours: [2025-04-28T12 2025-01-15T09] or [2025-04-28T12,2025-01-15T09]
        r"^\[\d{4}-\d{2}-\d{2}T\d{2}[\s,]\d{4}-\d{2}-\d{2}T\d{2}\]$",
        # Date with hours and minutes: [2025-04-28T12:30 2025-01-15T09:45] or [2025-04-28T12:30,2025-01-15T09:45]
        r"^\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}[\s,]\d{4}-\d{2}-\d{2}T\d{2}:\d{2}\]$",
        # Date with hours, minutes, and seconds: [2025-04-28T12:30:45 2025-01-15T09:45:30] or [2025-04-28T12:30:45,2025-01-15T09:45:30]
        r"^\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[\s,]\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\]$",
        # Full ISO format with microseconds: [2025-04-28T12:30:45.123456 2025-01-15T09:45:30.654321] or [2025-04-28T12:30:45.123456,2025-01-15T09:45:30.654321]
        r"^\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+[\s,]\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+\]$",
    ]

    for pattern in date_patterns:
        if re.match(pattern, v_str):
            # Split on either space or comma
            if ',' in v_str[1:-1]:
                range_values = v_str[1:-1].split(',')
            else:
                range_values = v_str[1:-1].split()
            return True, range_values

    if re.match(r"^\[-?\d+(?:\.\d+)?[\s,]-?\d+(?:\.\d+)?\]$", v_str):
        if ',' in v_str[1:-1]:
            v_list = v_str[1:-1].split(',')
        else:
            v_list = v_str[1:-1].split()
        return True, v_list

    return False, v_str

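Range filters arrive as a bracketed pair separated by a space or comma; anything else is passed back unchanged. Illustrative calls:

assert validate_search_range("[2025-01,2025-04]") == (True, ["2025-01", "2025-04"])
assert validate_search_range("[10,20]") == (True, ["10", "20"])
assert validate_search_range("open|closed") == (False, "open|closed")
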
def parse_search_array(input_string: str, key: str, value: str) -> str:
    parts = input_string.split("->")
    dict_key = parts[3].strip().replace("'", "").replace(">", "")
    if dict_key.startswith(' '):
        dict_key = dict_key[1:]
    output_sql = (
        f"payload::jsonb -> 'body' -> '{key}' "
        f"@> '[{{\"{dict_key}\": \"{value}\"}}]'"
    )
    return output_sql

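Given an accessor of the shape produced by transform_keys_to_sql, this rewrites the filter into a JSONB containment test over an array of objects. A worked example (illustrative, assuming the accessor format shown above):

sql_path = "payload -> 'body' -> 'items' ->> 'status'"
assert parse_search_array(sql_path, "items", "active") == (
    "payload::jsonb -> 'body' -> 'items' @> '[{\"status\": \"active\"}]'"
)
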
def get_next_date_value(value, format_string):
    from datetime import datetime, timedelta
    if format_string == 'YYYY':
        year = int(value)
        return str(year + 1)
    elif format_string == 'YYYY-MM':
        year, month = map(int, value.split('-'))
        if month == 12:
            return f"{year + 1}-01"
        else:
            return f"{year}-{month + 1:02d}"
    elif format_string == 'YYYY-MM-DD':
        dt = datetime.strptime(value, '%Y-%m-%d')
        next_dt = dt + timedelta(days=1)
        return next_dt.strftime('%Y-%m-%d')
    elif format_string == 'YYYY-MM-DD"T"HH24':
        dt = datetime.strptime(value, '%Y-%m-%dT%H')
        next_dt = dt + timedelta(hours=1)
        return next_dt.strftime('%Y-%m-%dT%H')
    elif format_string == 'YYYY-MM-DD"T"HH24:MI':
        dt = datetime.strptime(value, '%Y-%m-%dT%H:%M')
        next_dt = dt + timedelta(minutes=1)
        return next_dt.strftime('%Y-%m-%dT%H:%M')
    elif format_string == 'YYYY-MM-DD"T"HH24:MI:SS':
        dt = datetime.strptime(value, '%Y-%m-%dT%H:%M:%S')
        next_dt = dt + timedelta(seconds=1)
        return next_dt.strftime('%Y-%m-%dT%H:%M:%S')
    elif format_string == 'YYYY-MM-DD"T"HH24:MI:SS.US':
        # Parse the fraction with %f, which accepts 1 to 6 digits and
        # right-pads (e.g. ".5" is 500000 microseconds); converting the raw
        # fraction with int() misreads fractions shorter than six digits.
        dt = datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f')
        next_dt = dt + timedelta(microseconds=1)
        return next_dt.strftime('%Y-%m-%dT%H:%M:%S.%f')

    return value

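The helper returns the next instant at the value's own precision, which makes a truncated timestamp usable as an exclusive upper bound (a reading inferred from the range handling around it, not stated in the package). Illustrative calls:

assert get_next_date_value("2025", "YYYY") == "2026"
assert get_next_date_value("2025-12", "YYYY-MM") == "2026-01"
assert get_next_date_value("2025-04-28", "YYYY-MM-DD") == "2025-04-29"
assert get_next_date_value("2025-04-28T23", 'YYYY-MM-DD"T"HH24') == "2025-04-29T00"
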
def is_date_time_value(value):
    patterns = [
        # Full ISO format with microseconds: 2025-04-28T12:28:00.660475
        (r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+$', 'YYYY-MM-DD"T"HH24:MI:SS.US'),
        # ISO format without microseconds: 2025-04-28T12:28:00
        (r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}$', 'YYYY-MM-DD"T"HH24:MI:SS'),
        # ISO format with minutes precision: 2025-04-28T12:28
        (r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}$', 'YYYY-MM-DD"T"HH24:MI'),
        # ISO format with hours precision: 2025-04-28T12
        (r'^\d{4}-\d{2}-\d{2}T\d{2}$', 'YYYY-MM-DD"T"HH24'),
        # Date only: 2025-04-28
        (r'^\d{4}-\d{2}-\d{2}$', 'YYYY-MM-DD')
    ]

    for pattern, format_string in patterns:
        if re.match(pattern, value):
            return True, format_string

    return False, None

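Each recognized precision maps to a Postgres-style format template (the naming suggests to_char/to_timestamp format strings). Illustrative calls:

assert is_date_time_value("2025-04-28") == (True, 'YYYY-MM-DD')
assert is_date_time_value("2025-04-28T12:30") == (True, 'YYYY-MM-DD"T"HH24:MI')
assert is_date_time_value("12:30") == (False, None)
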
def parse_search_string(string):
    result = {}
    terms = string.split()

    for term in terms:
        negative = term.startswith('-@')

        if not (term.startswith('@') or term.startswith('-@')):
            continue

        parts = term.split(':', 1)
        if len(parts) != 2:
            continue

        field, value = parts
        field = field[2:] if negative else field[1:]

        is_range, range_values = validate_search_range(value)

        if is_range:
            value_type = 'string'
            format_strings = {}

            all_numeric = True
            for val in range_values:
                is_datetime, format_string = is_date_time_value(val)
                if is_datetime:
                    value_type = 'datetime'
                    format_strings[val] = format_string
                if not re.match(r'^-?\d+(?:\.\d+)?$', val):
                    all_numeric = False

            if value_type != 'datetime' and all_numeric:
                value_type = 'numeric'

            field_data = {
                'values': range_values,
                'operation': 'RANGE',
                'negative': negative,
                'is_range': True,
                'range_values': range_values,
                'value_type': value_type
            }

            if value_type == 'datetime':
                field_data['format_strings'] = format_strings

            result[field] = field_data
            continue

        values = value.split('|')
        operation = 'OR' if len(values) > 1 else 'AND'

        value_type = 'string'  # Default type
        format_strings = {}
        all_boolean = True

        for val in values:
            is_datetime, format_string = is_date_time_value(val)
            if is_datetime:
                value_type = 'datetime'
                format_strings[val] = format_string
                all_boolean = False
            elif val.lower() not in ['true', 'false']:
                all_boolean = False

        if all_boolean and value_type == 'string':
            value_type = 'boolean'

        if field not in result:
            field_data = {
                'values': values,
                'operation': operation,
                'negative': negative,
                'value_type': value_type,
            }

            if value_type == 'datetime':
                field_data['format_strings'] = format_strings

            result[field] = field_data
        else:
            if result[field]['negative'] != negative:
                field_data = {
                    'values': values,
                    'operation': operation,
                    'negative': negative
                }

                if value_type == 'datetime':
                    field_data['value_type'] = value_type
                    field_data['format_strings'] = format_strings

                result[field] = field_data
            else:
                result[field]['values'].extend(values)
                if operation == 'OR':
                    result[field]['operation'] = 'OR'

                if value_type == 'datetime':
                    result[field]['value_type'] = value_type
                    if 'format_strings' not in result[field]:
                        result[field]['format_strings'] = {}
                    result[field]['format_strings'].update(format_strings)
    return result

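Putting the pieces together, a search string parses into per-field filter descriptors. An illustrative example (the field names are invented):

parsed = parse_search_string("@status:open|closed -@owner:bob @count:[1,10]")
assert parsed["status"] == {
    "values": ["open", "closed"],
    "operation": "OR",
    "negative": False,
    "value_type": "string",
}
assert parsed["owner"]["negative"] is True
assert parsed["count"]["operation"] == "RANGE"
assert parsed["count"]["value_type"] == "numeric"
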
async def events_query(
    query: api.Query, user_shortname: str | None = None
) -> tuple[int, list[core.Record]]:
    from utils.access_control import access_control

    records: list[core.Record] = []
    total: int = 0

    path = Path(f"{settings.spaces_folder}/{query.space_name}/.dm/events.jsonl")
    if not path.is_file():
        return total, records

    result = []
    if query.search:
        # Equivalent of: grep '"<term>"' events.jsonl | tail -n limit+offset | tac | sed '1,<offset>d'
        p = subprocess.Popen(
            ["grep", f'"{query.search}"', path], stdout=subprocess.PIPE
        )
        p = subprocess.Popen(
            ["tail", "-n", f"{query.limit + query.offset}"],
            stdin=p.stdout,
            stdout=subprocess.PIPE,
        )
        p = subprocess.Popen(["tac"], stdin=p.stdout, stdout=subprocess.PIPE)
        if query.offset > 0:
            p = subprocess.Popen(
                ["sed", f"1,{query.offset}d"],
                stdin=p.stdout,
                stdout=subprocess.PIPE,
            )
        r, _ = p.communicate()
        result = list(filter(None, r.decode("utf-8").split("\n")))
    else:
        cmd = f"(tail -n {query.limit + query.offset} {path}; echo) | tac"
        if query.offset > 0:
            cmd += f" | sed '1,{query.offset}d'"
        result = list(
            filter(
                None,
                subprocess.run(
                    [cmd], capture_output=True, text=True, shell=True
                ).stdout.split("\n"),
            )
        )

    if query.search:
        p1 = subprocess.Popen(
            ["grep", f'"{query.search}"', path], stdout=subprocess.PIPE
        )
        p2 = subprocess.Popen(["wc", "-l"], stdin=p1.stdout, stdout=subprocess.PIPE)
        r, _ = p2.communicate()
        total = int(r.decode(), 10)
    else:
        total = int(
            subprocess.run(
                [f"wc -l < {path}"],
                capture_output=True,
                text=True,
                shell=True,
            ).stdout,
            10,
        )
    for line in result:
        action_obj = json.loads(line)
        if (
            query.from_date
            and str_to_datetime(action_obj["timestamp"]) < query.from_date
        ):
            continue

        if query.to_date and str_to_datetime(action_obj["timestamp"]) > query.to_date:
            break

        if not await access_control.check_access(
            user_shortname=str(user_shortname),
            space_name=query.space_name,
            subpath=action_obj.get("resource", {}).get("subpath", "/"),
            resource_type=action_obj["resource"]["type"],
            action_type=core.ActionType(action_obj["request"]),
        ):
            continue

        records.append(
            core.Record(
                resource_type=action_obj["resource"]["type"],
                shortname=action_obj["resource"]["shortname"],
                subpath=action_obj["resource"]["subpath"],
                attributes=action_obj,
            ),
        )

    return total, records

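The tail/tac/sed plumbing above implements newest-first pagination over the JSONL log. A pure-Python equivalent of that slice, for intuition (illustrative only):

# Last (limit + offset) lines, newest first, then drop the first offset rows.
lines = [f"event-{i}" for i in range(10)]
limit, offset = 3, 2
page = list(reversed(lines[-(limit + offset):]))[offset:]
assert page == ["event-7", "event-6", "event-5"]
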
def set_results_from_aggregation(query, item, results, idx):
    # Columns that are not attributes of the Aggregated model are carried
    # along as extra attributes on the resulting record.
    extra = {}
    for key, value in item._mapping.items():
        if not hasattr(Aggregated, key):
            extra[key] = value

    results[idx] = Aggregated.model_validate(item).to_record(
        query.subpath,
        (
            str(getattr(item, "shortname"))
            if hasattr(item, "shortname") and isinstance(item.shortname, str)
            else "/"
        ),
        extra=extra,
    )

    return results

def set_table_for_query(query):
    if query.type is QueryType.spaces:
        return Spaces
    elif query.type is QueryType.history:
        return Histories
    elif query.space_name == "management":
        match query.subpath:
            case "/users":
                return Users
            case "/roles":
                return Roles
            case "/permissions":
                return Permissions
            case _:
                return Entries
    else:
        return Entries

def build_query_filter_for_allowed_field_values(perm_value) -> str:
    filters = []

    for k, v in perm_value.items():
        if isinstance(v, str):
            filters.append(f"@{k}:{v}")
        elif isinstance(v, list) and v:
            # Flatten one level of nesting, keeping only non-empty strings.
            flat_values = []
            for item in v:
                if isinstance(item, list):
                    for sub in item:
                        if isinstance(sub, str) and sub:
                            flat_values.append(sub)
                elif isinstance(item, str) and item:
                    flat_values.append(item)
            if flat_values:
                # De-duplicate while preserving first-seen order.
                uniq_flat_values = list(dict.fromkeys(flat_values))
                values = "|".join(uniq_flat_values)
                filters.append(f"@{k}:{values}")

    return " ".join(filters)
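The returned string uses the same @field:value syntax that parse_search_string consumes, so permission-derived filters can be appended to a user's search. An illustration (the permission payload shape is inferred from the flattening logic above):

perm = {"state": ["open", ["closed", "open"]], "owner": "alice", "tags": []}
assert build_query_filter_for_allowed_field_values(perm) == (
    "@state:open|closed @owner:alice"
)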