dmart-0.1.9-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (149)
  1. alembic/__init__.py +0 -0
  2. alembic/env.py +91 -0
  3. alembic/scripts/__init__.py +0 -0
  4. alembic/scripts/calculate_checksums.py +77 -0
  5. alembic/scripts/migration_f7a4949eed19.py +28 -0
  6. alembic/versions/0f3d2b1a7c21_add_authz_materialized_views.py +87 -0
  7. alembic/versions/10d2041b94d4_last_checksum_history.py +62 -0
  8. alembic/versions/1cf4e1ee3cb8_ext_permission_with_filter_fields_values.py +33 -0
  9. alembic/versions/26bfe19b49d4_rm_failedloginattempts.py +42 -0
  10. alembic/versions/3c8bca2219cc_add_otp_table.py +38 -0
  11. alembic/versions/6675fd9dfe42_remove_unique_from_sessions_table.py +36 -0
  12. alembic/versions/71bc1df82e6a_adding_user_last_login_at.py +43 -0
  13. alembic/versions/74288ccbd3b5_initial.py +264 -0
  14. alembic/versions/7520a89a8467_rm_activesession_table.py +39 -0
  15. alembic/versions/848b623755a4_make_created_nd_updated_at_required.py +138 -0
  16. alembic/versions/8640dcbebf85_add_notes_to_users.py +32 -0
  17. alembic/versions/91c94250232a_adding_fk_on_owner_shortname.py +104 -0
  18. alembic/versions/98ecd6f56f9a_ext_meta_with_owner_group_shortname.py +66 -0
  19. alembic/versions/9aae9138c4ef_indexing_created_at_updated_at.py +80 -0
  20. alembic/versions/__init__.py +0 -0
  21. alembic/versions/b53f916b3f6d_json_to_jsonb.py +492 -0
  22. alembic/versions/eb5f1ec65156_adding_user_locked_to_device.py +36 -0
  23. alembic/versions/f7a4949eed19_adding_query_policies_to_meta.py +60 -0
  24. api/__init__.py +0 -0
  25. api/info/__init__.py +0 -0
  26. api/info/router.py +109 -0
  27. api/managed/__init__.py +0 -0
  28. api/managed/router.py +1541 -0
  29. api/managed/utils.py +1850 -0
  30. api/public/__init__.py +0 -0
  31. api/public/router.py +758 -0
  32. api/qr/__init__.py +0 -0
  33. api/qr/router.py +108 -0
  34. api/user/__init__.py +0 -0
  35. api/user/model/__init__.py +0 -0
  36. api/user/model/errors.py +14 -0
  37. api/user/model/requests.py +165 -0
  38. api/user/model/responses.py +11 -0
  39. api/user/router.py +1401 -0
  40. api/user/service.py +270 -0
  41. bundler.py +44 -0
  42. config/__init__.py +0 -0
  43. config/channels.json +11 -0
  44. config/notification.json +17 -0
  45. data_adapters/__init__.py +0 -0
  46. data_adapters/adapter.py +16 -0
  47. data_adapters/base_data_adapter.py +467 -0
  48. data_adapters/file/__init__.py +0 -0
  49. data_adapters/file/adapter.py +2043 -0
  50. data_adapters/file/adapter_helpers.py +1013 -0
  51. data_adapters/file/archive.py +150 -0
  52. data_adapters/file/create_index.py +331 -0
  53. data_adapters/file/create_users_folders.py +52 -0
  54. data_adapters/file/custom_validations.py +68 -0
  55. data_adapters/file/drop_index.py +40 -0
  56. data_adapters/file/health_check.py +560 -0
  57. data_adapters/file/redis_services.py +1110 -0
  58. data_adapters/helpers.py +27 -0
  59. data_adapters/sql/__init__.py +0 -0
  60. data_adapters/sql/adapter.py +3210 -0
  61. data_adapters/sql/adapter_helpers.py +491 -0
  62. data_adapters/sql/create_tables.py +451 -0
  63. data_adapters/sql/create_users_folders.py +53 -0
  64. data_adapters/sql/db_to_json_migration.py +482 -0
  65. data_adapters/sql/health_check_sql.py +232 -0
  66. data_adapters/sql/json_to_db_migration.py +454 -0
  67. data_adapters/sql/update_query_policies.py +101 -0
  68. data_generator.py +81 -0
  69. dmart-0.1.9.dist-info/METADATA +64 -0
  70. dmart-0.1.9.dist-info/RECORD +149 -0
  71. dmart-0.1.9.dist-info/WHEEL +5 -0
  72. dmart-0.1.9.dist-info/entry_points.txt +2 -0
  73. dmart-0.1.9.dist-info/top_level.txt +23 -0
  74. dmart.py +513 -0
  75. get_settings.py +7 -0
  76. languages/__init__.py +0 -0
  77. languages/arabic.json +15 -0
  78. languages/english.json +16 -0
  79. languages/kurdish.json +14 -0
  80. languages/loader.py +13 -0
  81. main.py +506 -0
  82. migrate.py +24 -0
  83. models/__init__.py +0 -0
  84. models/api.py +203 -0
  85. models/core.py +597 -0
  86. models/enums.py +255 -0
  87. password_gen.py +8 -0
  88. plugins/__init__.py +0 -0
  89. plugins/action_log/__init__.py +0 -0
  90. plugins/action_log/plugin.py +121 -0
  91. plugins/admin_notification_sender/__init__.py +0 -0
  92. plugins/admin_notification_sender/plugin.py +124 -0
  93. plugins/ldap_manager/__init__.py +0 -0
  94. plugins/ldap_manager/plugin.py +100 -0
  95. plugins/local_notification/__init__.py +0 -0
  96. plugins/local_notification/plugin.py +123 -0
  97. plugins/realtime_updates_notifier/__init__.py +0 -0
  98. plugins/realtime_updates_notifier/plugin.py +58 -0
  99. plugins/redis_db_update/__init__.py +0 -0
  100. plugins/redis_db_update/plugin.py +188 -0
  101. plugins/resource_folders_creation/__init__.py +0 -0
  102. plugins/resource_folders_creation/plugin.py +81 -0
  103. plugins/system_notification_sender/__init__.py +0 -0
  104. plugins/system_notification_sender/plugin.py +188 -0
  105. plugins/update_access_controls/__init__.py +0 -0
  106. plugins/update_access_controls/plugin.py +9 -0
  107. pytests/__init__.py +0 -0
  108. pytests/api_user_models_erros_test.py +16 -0
  109. pytests/api_user_models_requests_test.py +98 -0
  110. pytests/archive_test.py +72 -0
  111. pytests/base_test.py +300 -0
  112. pytests/get_settings_test.py +14 -0
  113. pytests/json_to_db_migration_test.py +237 -0
  114. pytests/service_test.py +26 -0
  115. pytests/test_info.py +55 -0
  116. pytests/test_status.py +15 -0
  117. run_notification_campaign.py +98 -0
  118. scheduled_notification_handler.py +121 -0
  119. schema_migration.py +208 -0
  120. schema_modulate.py +192 -0
  121. set_admin_passwd.py +55 -0
  122. sync.py +202 -0
  123. utils/__init__.py +0 -0
  124. utils/access_control.py +306 -0
  125. utils/async_request.py +8 -0
  126. utils/exporter.py +309 -0
  127. utils/firebase_notifier.py +57 -0
  128. utils/generate_email.py +38 -0
  129. utils/helpers.py +352 -0
  130. utils/hypercorn_config.py +12 -0
  131. utils/internal_error_code.py +60 -0
  132. utils/jwt.py +124 -0
  133. utils/logger.py +167 -0
  134. utils/middleware.py +99 -0
  135. utils/notification.py +75 -0
  136. utils/password_hashing.py +16 -0
  137. utils/plugin_manager.py +215 -0
  138. utils/query_policies_helper.py +112 -0
  139. utils/regex.py +44 -0
  140. utils/repository.py +529 -0
  141. utils/router_helper.py +19 -0
  142. utils/settings.py +165 -0
  143. utils/sms_notifier.py +21 -0
  144. utils/social_sso.py +67 -0
  145. utils/templates/activation.html.j2 +26 -0
  146. utils/templates/reminder.html.j2 +17 -0
  147. utils/ticket_sys_utils.py +203 -0
  148. utils/web_notifier.py +29 -0
  149. websocket.py +231 -0
data_adapters/sql/adapter.py
@@ -0,0 +1,3210 @@
1
+ import asyncio
2
+ import json
3
+ import os
4
+ import sys
5
+ import time
6
+ import hashlib
7
+ from contextlib import asynccontextmanager
8
+ from copy import copy
9
+ from datetime import datetime
10
+ from pathlib import Path
11
+ from typing import Any, Type, Tuple
12
+ from uuid import uuid4
13
+ import ast
14
+ from fastapi import status
15
+ from fastapi.logger import logger
16
+ from sqlalchemy import literal_column, or_
17
+ from sqlalchemy.orm import sessionmaker, defer
18
+ from sqlmodel import Session, select, col, delete, update, Integer, Float, Boolean, func, text
19
+ from sqlalchemy import String, cast, bindparam
20
+ import io
21
+ from sys import modules as sys_modules
22
+ import models.api as api
23
+ from models.api import Exception as API_Exception, Error as API_Error
24
+ import models.core as core
25
+ from models.enums import QueryType, LockAction, ResourceType, SortType
26
+ from data_adapters.sql.create_tables import (
27
+ Entries,
28
+ Histories,
29
+ Permissions,
30
+ Roles,
31
+ Users,
32
+ Spaces,
33
+ Attachments,
34
+ Locks,
35
+ Sessions,
36
+ Invitations,
37
+ URLShorts,
38
+ OTP,
39
+ )
40
+ from utils.helpers import (
41
+ arr_remove_common,
42
+ get_removed_items,
43
+ camel_case, resolve_schema_references,
44
+ )
45
+ from utils.internal_error_code import InternalErrorCode
46
+ from utils.middleware import get_request_data
47
+ from utils.password_hashing import hash_password, verify_password
48
+ from utils.query_policies_helper import get_user_query_policies, generate_query_policies
49
+ from utils.settings import settings
50
+ from data_adapters.base_data_adapter import BaseDataAdapter, MetaChild
51
+ from data_adapters.sql.adapter_helpers import (
52
+ set_results_from_aggregation, set_table_for_query, events_query,
53
+ subpath_checker, parse_search_string,
54
+ sqlite_aggregate_functions, mysql_aggregate_functions,
55
+ postgres_aggregate_functions, transform_keys_to_sql,
56
+ get_next_date_value, is_date_time_value,
57
+ # build_query_filter_for_allowed_field_values
58
+ )
59
+ from data_adapters.helpers import get_nested_value, trans_magic_words
60
+ from jsonschema import Draft7Validator
61
+ from starlette.datastructures import UploadFile
62
+ from sqlalchemy import URL
63
+ from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
64
+
65
+
66
+ def query_attachment_aggregation(subpath):
67
+ return select(
68
+ literal_column("resource_type").label("resource_type"),
69
+ func.count(text("*")).label("count")
70
+ ).group_by(text("resource_type")) \
71
+ .where(col(Attachments.subpath) == subpath)
72
+
73
+
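For orientation, a minimal usage sketch of the helper above (hypothetical, not part of the packaged module; it assumes an AsyncSession like the one SQLAdapter.get_session yields further down). The statement selects resource_type and a count grouped by resource_type, so each result row unpacks into a (resource_type, count) pair:

    # Hypothetical sketch: count attachments per resource_type under one subpath.
    async def count_attachments_by_type(session, subpath: str) -> dict[str, int]:
        rows = (await session.execute(query_attachment_aggregation(subpath))).all()
        return {resource_type: count for resource_type, count in rows}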
74
+ def query_aggregation(table, query):
75
+ aggregate_functions: list = []
76
+
77
+ if "sqlite" in settings.database_driver:
78
+ aggregate_functions = sqlite_aggregate_functions
79
+ elif "mysql" in settings.database_driver:
80
+ aggregate_functions = mysql_aggregate_functions
81
+ elif "postgresql" in settings.database_driver:
82
+ aggregate_functions = postgres_aggregate_functions
83
+
84
+ def _normalize_json_path(path: str) -> str:
85
+ if path.startswith("@"):
86
+ path = path[1:]
87
+ if path.startswith("body."):
88
+ return f"payload.{path}"
89
+ return path
90
+
91
+ def _selectable_for_load(item: str):
92
+ if item.startswith("@"):
93
+ col_name = item.replace("@", "")
94
+ return getattr(table, col_name)
95
+
96
+ if hasattr(table, item):
97
+ return getattr(table, item)
98
+
99
+ json_path = _normalize_json_path(item)
100
+ expr = transform_keys_to_sql(json_path)
101
+ alias = item.replace(".", "_")
102
+ return text(expr).label(alias)
103
+
104
+ statement = select(*[_selectable_for_load(ll) for ll in query.aggregation_data.load])
105
+
106
+ if bool(query.aggregation_data.group_by):
107
+ group_by_exprs = []
108
+ for gb in query.aggregation_data.group_by:
109
+ if gb.startswith("@"):
110
+ group_by_exprs.append(table.__dict__[gb.replace("@", "")])
111
+ elif hasattr(table, gb):
112
+ group_by_exprs.append(getattr(table, gb))
113
+ else:
114
+ json_path = _normalize_json_path(gb)
115
+ expr = transform_keys_to_sql(json_path)
116
+ group_by_exprs.append(text(expr))
117
+ if group_by_exprs:
118
+ statement = statement.group_by(*group_by_exprs)
119
+
120
+ if bool(query.aggregation_data.reducers):
121
+ agg_selects = []
122
+ for reducer in query.aggregation_data.reducers:
123
+ if reducer.reducer_name in aggregate_functions:
124
+ field_expr_str: str
125
+ if len(reducer.args) == 0:
126
+ field_expr_str = "*"
127
+ else:
128
+ arg0 = reducer.args[0]
129
+ arg0 = _normalize_json_path(arg0)
130
+ base_arg = arg0
131
+ if hasattr(table, base_arg):
132
+ field = getattr(table, base_arg)
133
+ if field is None:
134
+ continue
135
+ if isinstance(field.type, Integer) or isinstance(field.type, Boolean):
136
+ field_expr_str = f"{field}::int"
137
+ elif isinstance(field.type, Float):
138
+ field_expr_str = f"{field}::float"
139
+ else:
140
+ field_expr_str = f"{field}::text"
141
+ else:
142
+ jp = transform_keys_to_sql(arg0)
143
+ if reducer.reducer_name in ("sum", "avg", "total"):
144
+ field_expr_str = f"({jp})::float"
145
+ elif reducer.reducer_name in ("count", "r_count"):
146
+ field_expr_str = "*"
147
+ elif reducer.reducer_name in ("min", "max", "group_concat"):
148
+ field_expr_str = f"({jp})::text"
149
+ else:
150
+ field_expr_str = f"({jp})"
151
+ agg_selects.append(
152
+ getattr(func, reducer.reducer_name)(text(field_expr_str)).label(reducer.alias)
153
+ )
154
+ if agg_selects:
155
+ cols = list(statement.selected_columns) + agg_selects
156
+ statement = statement.with_only_columns(*cols)
157
+ return statement
158
+
159
+
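A hedged sketch of what the reducer mapping above produces. The aggregation_data shape (load, group_by, reducers carrying reducer_name/args/alias) is inferred from the attribute access in query_aggregation, and the JSON-path SQL comes from transform_keys_to_sql in adapter_helpers, so the generated SQL shown is only approximate:

    # Hypothetical stand-in for api.Query.aggregation_data (field names inferred from the code above).
    from types import SimpleNamespace
    reducer = SimpleNamespace(reducer_name="count", args=[], alias="total")
    query = SimpleNamespace(aggregation_data=SimpleNamespace(
        load=["@space_name"], group_by=["@space_name"], reducers=[reducer]))
    statement = query_aggregation(Entries, query)
    # Roughly: SELECT space_name, count(*) AS total FROM entries GROUP BY space_name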
160
+ def string_to_list(input_str):
161
+ if isinstance(input_str, list):
162
+ return input_str
163
+ try:
164
+ result = ast.literal_eval(input_str)
165
+ if isinstance(result, list):
166
+ return result
167
+ except (ValueError, SyntaxError):
168
+ return [input_str]
169
+
170
+
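A few illustrative calls for string_to_list (hypothetical, for the reader's benefit). Note that when ast.literal_eval succeeds but yields something other than a list, the function falls through and returns None:

    # Hypothetical examples of string_to_list behavior:
    string_to_list(["a", "b"])    # -> ["a", "b"]     (lists pass through unchanged)
    string_to_list("['a', 'b']")  # -> ["a", "b"]     (stringified list is parsed)
    string_to_list("plain text")  # -> ["plain text"] (literal_eval fails, value is wrapped)
    string_to_list("42")          # -> None           (parses, but not a list, so falls through)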
171
+ def apply_acl_and_query_policies(statement, table, user_shortname, user_query_policies):
172
+ if table not in [Attachments, Histories] and hasattr(table, 'query_policies'):
173
+ access_conditions = [
174
+ "owner_shortname = :user_shortname",
175
+ "EXISTS (SELECT 1 FROM jsonb_array_elements(CASE WHEN jsonb_typeof(acl::jsonb) = 'array' THEN acl::jsonb ELSE '[]'::jsonb END) AS elem WHERE elem->>'user_shortname' = :user_shortname AND (elem->'allowed_actions') ? 'query')"
176
+ ]
177
+
178
+ if user_query_policies:
179
+ raw_items = [str(p) for p in user_query_policies]
180
+ patterns = []
181
+ for item in raw_items:
182
+ for part in str(item).split('|'):
183
+ part = part.strip()
184
+ if part:
185
+ patterns.append(part.replace('*', '%'))
186
+
187
+ seen = set()
188
+ dedup_patterns = []
189
+ for pat in patterns:
190
+ if pat not in seen:
191
+ seen.add(pat)
192
+ dedup_patterns.append(pat)
193
+
194
+ if dedup_patterns:
195
+ like_clauses = []
196
+ like_params = {}
197
+ for idx, pat in enumerate(dedup_patterns):
198
+ param_name = f"qp_like_{idx}"
199
+ like_clauses.append(f"qp LIKE :{param_name}")
200
+ like_params[param_name] = pat
201
+
202
+ qp_exists = "EXISTS (SELECT 1 FROM unnest(query_policies) AS qp WHERE " + " OR ".join(like_clauses) + ")"
203
+ access_conditions.insert(1, qp_exists)
204
+
205
+ clause_str = "(" + " OR ".join(access_conditions) + ")"
206
+ access_filter = text(clause_str)
207
+ statement = statement.where(access_filter).params(
208
+ user_shortname=user_shortname,
209
+ **like_params
210
+ )
211
+ else:
212
+ clause_str = "(" + " OR ".join(access_conditions) + ")"
213
+ access_filter = text(clause_str)
214
+ statement = statement.where(access_filter).params(user_shortname=user_shortname)
215
+ else:
216
+ clause_str = "(" + " OR ".join(access_conditions) + ")"
217
+ access_filter = text(clause_str)
218
+ statement = statement.where(access_filter).params(user_shortname=user_shortname)
219
+ return statement
220
+
221
+
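To make the policy handling above concrete, a small hedged sketch of the same '*' to '%' translation applied to one policy string. The 'space:subpath'-style format is an assumption here; get_user_query_policies in utils.query_policies_helper defines the real shape:

    # Hypothetical policy string; '|'-separated parts each become one LIKE pattern.
    policy = "demo:/content/*|demo:/content/*:active"
    patterns = []
    for part in policy.split('|'):
        part = part.strip()
        if part:
            patterns.append(part.replace('*', '%'))
    # patterns == ['demo:/content/%', 'demo:/content/%:active']
    # apply_acl_and_query_policies then ORs these into:
    #   EXISTS (SELECT 1 FROM unnest(query_policies) AS qp
    #           WHERE qp LIKE :qp_like_0 OR qp LIKE :qp_like_1)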
222
+ async def set_sql_statement_from_query(table, statement, query, is_for_count):
223
+ try:
224
+ if query.type == QueryType.attachments_aggregation and not is_for_count:
225
+ return query_attachment_aggregation(query.subpath)
226
+
227
+ if query.type == QueryType.aggregation and not is_for_count:
228
+ statement = query_aggregation(table, query)
229
+
230
+ if query.type == QueryType.tags and not is_for_count:
231
+ if query.retrieve_json_payload:
232
+ statement = select(
233
+ func.jsonb_array_elements_text(table.tags).label('tag'),
234
+ func.count('*').label('count')
235
+ ).group_by('tag')
236
+ else:
237
+ statement = select(func.jsonb_array_elements_text(table.tags).label('tag')).distinct()
238
+
239
+ except Exception as e:
240
+ print("[!query]", e)
241
+ raise api.Exception(
242
+ status_code=status.HTTP_400_BAD_REQUEST,
243
+ error=api.Error(
244
+ type="query",
245
+ code=InternalErrorCode.SOMETHING_WRONG,
246
+ message=str(e),
247
+ ),
248
+ )
249
+
250
+ if query.space_name:
251
+ statement = statement.where(table.space_name == query.space_name)
252
+ if query.subpath and table in [Entries, Attachments]:
253
+ if query.exact_subpath:
254
+ statement = statement.where(table.subpath == query.subpath)
255
+ else:
256
+ # Use bind parameter for the ILIKE pattern to avoid string interpolation
257
+ subpath_like = (f"{query.subpath}/%".replace('//', '/'))
258
+ statement = statement.where(
259
+ or_(
260
+ table.subpath == query.subpath,
261
+ text("subpath ILIKE :subpath_like").bindparams(bindparam("subpath_like"))
262
+ )
263
+ ).params(subpath_like=subpath_like)
264
+ if query.search:
265
+ if not query.search.startswith("@") and not query.search.startswith("-"):
266
+ p = "shortname || ' ' || tags || ' ' || displayname || ' ' || description || ' ' || payload"
267
+ if table is Users:
268
+ p += " || ' ' || COALESCE(email, '') || ' ' || COALESCE(msisdn, '') || ' ' || roles"
269
+ if table is Roles:
270
+ p += " || ' ' || permissions"
271
+ # Parameterize search string
272
+ statement = statement.where(
273
+ text("(" + p + ") ILIKE :search")
274
+ ).params(search=f"%{query.search}%")
275
+ else:
276
+ search_tokens = parse_search_string(query.search)
277
+
278
+ try:
279
+ table_columns = set(c.name for c in table.__table__.columns) # type: ignore[attr-defined]
280
+ except Exception:
281
+ table_columns = set()
282
+
283
+ def _field_exists_in_table(_field: str) -> bool:
284
+ if _field in table_columns:
285
+ return True
286
+ if _field.startswith('payload.') and 'payload' in table_columns:
287
+ return True
288
+ if _field.startswith('payload.body.') and 'payload' in table_columns:
289
+ return True
290
+ return False
291
+
292
+ for field, field_data in search_tokens.items():
293
+ if not _field_exists_in_table(field):
294
+ continue
295
+ values = field_data['values']
296
+ operation = field_data['operation']
297
+ negative = field_data.get('negative', False)
298
+ value_type = field_data.get('value_type', 'string')
299
+ format_strings = field_data.get('format_strings', {})
300
+
301
+ if not values:
302
+ continue
303
+
304
+ if field.startswith('payload.body.'):
305
+ payload_field = field.replace('payload.body.', '')
306
+ payload_path = '->'.join([f"'{part}'" for part in payload_field.split('.')])
307
+
308
+ payload_path_splited = payload_path.split('->')
309
+ if len(payload_path_splited) > 1:
310
+ _nested_no_last = '->'.join(payload_path_splited[:-1])
311
+ _last = payload_path_splited[-1]
312
+ _payload_text_extract = f"payload::jsonb->'body'->{_nested_no_last}->>{_last}"
313
+ else:
314
+ _payload_text_extract = f"payload::jsonb->'body'->>{payload_path}"
315
+ conditions = []
316
+
317
+ if value_type == 'numeric' and field_data.get('is_range', False) and len(
318
+ field_data.get('range_values', [])) == 2:
319
+ val1, val2 = field_data['range_values']
320
+ try:
321
+ num1 = float(val1)
322
+ num2 = float(val2)
323
+ if num1 > num2:
324
+ val1, val2 = val2, val1
325
+ except ValueError:
326
+ pass
327
+ if negative:
328
+ conditions.append(
329
+ f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'number' AND (payload::jsonb->'body'->>{payload_path})::float NOT BETWEEN {val1} AND {val2})")
330
+ else:
331
+ conditions.append(
332
+ f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'number' AND (payload::jsonb->'body'->>{payload_path})::float BETWEEN {val1} AND {val2})")
333
+
334
+ for value in values:
335
+ if value_type == 'datetime':
336
+ if field_data.get('is_range', False) and len(field_data.get('range_values', [])) == 2:
337
+ range_values = field_data['range_values']
338
+ val1, val2 = range_values
339
+ if is_date_time_value(val1)[0] and is_date_time_value(val2)[0]:
340
+ fmt1 = format_strings.get(val1)
341
+ fmt2 = format_strings.get(val2)
342
+ if fmt1 and fmt2:
343
+ if fmt1 == fmt2:
344
+ if val1 > val2:
345
+ val1, val2 = val2, val1
346
+ else:
347
+ try:
348
+ from datetime import datetime
349
+ dt1 = datetime.strptime(val1, fmt1.replace('YYYY', '%Y').replace('MM',
350
+ '%m').replace(
351
+ 'DD', '%d').replace('"T"HH24', 'T%H').replace('MI', '%M').replace(
352
+ 'SS', '%S').replace('US', '%f'))
353
+ dt2 = datetime.strptime(val2, fmt2.replace('YYYY', '%Y').replace('MM',
354
+ '%m').replace(
355
+ 'DD', '%d').replace('"T"HH24', 'T%H').replace('MI', '%M').replace(
356
+ 'SS', '%S').replace('US', '%f'))
357
+ if dt1 > dt2:
358
+ val1, val2 = val2, val1
359
+ except Exception:
360
+ if val1 > val2:
361
+ val1, val2 = val2, val1
362
+ else:
363
+ if val1 > val2:
364
+ val1, val2 = val2, val1
365
+
366
+ start_value, end_value = val1, val2
367
+ start_format = format_strings.get(start_value)
368
+ end_format = format_strings.get(end_value)
369
+
370
+ if start_format and end_format:
371
+ if negative:
372
+ string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND TO_TIMESTAMP({_payload_text_extract}, '{start_format}') NOT BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}') AND TO_TIMESTAMP('{end_value}', '{end_format}'))"
373
+ fallback_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND ({_payload_text_extract})::text NOT BETWEEN '{start_value}' AND '{end_value}')"
374
+ conditions.append(f"({string_condition} OR {fallback_condition})")
375
+ else:
376
+ string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND TO_TIMESTAMP({_payload_text_extract}, '{start_format}') BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}') AND TO_TIMESTAMP('{end_value}', '{end_format}'))"
377
+ fallback_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND ({_payload_text_extract})::text BETWEEN '{start_value}' AND '{end_value}')"
378
+ conditions.append(f"({string_condition} OR {fallback_condition})")
379
+ else:
380
+ format_string = format_strings.get(value)
381
+ if format_string:
382
+ next_value = get_next_date_value(value, format_string)
383
+
384
+ if negative:
385
+ string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND (TO_TIMESTAMP({_payload_text_extract}, '{format_string}') < TO_TIMESTAMP('{value}', '{format_string}') OR TO_TIMESTAMP({_payload_text_extract}, '{format_string}') >= TO_TIMESTAMP('{next_value}', '{format_string}')))"
386
+ fallback_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND (({_payload_text_extract})::text < '{value}' OR ({_payload_text_extract})::text >= '{next_value}'))"
387
+ conditions.append(f"({string_condition} OR {fallback_condition})")
388
+ else:
389
+ string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND TO_TIMESTAMP({_payload_text_extract}, '{format_string}') >= TO_TIMESTAMP('{value}', '{format_string}') AND TO_TIMESTAMP({_payload_text_extract}, '{format_string}') < TO_TIMESTAMP('{next_value}', '{format_string}'))"
390
+ fallback_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND ({_payload_text_extract})::text >= '{value}' AND ({_payload_text_extract})::text < '{next_value}')"
391
+ conditions.append(f"({string_condition} OR {fallback_condition})")
392
+ elif value_type == 'boolean':
393
+ for value in values:
394
+ bool_value = value.lower()
395
+ if negative:
396
+ bool_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'boolean' AND ({_payload_text_extract})::boolean != {bool_value})"
397
+ string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND ({_payload_text_extract})::boolean != {bool_value})"
398
+ conditions.append(f"({bool_condition} OR {string_condition})")
399
+ else:
400
+ bool_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'boolean' AND ({_payload_text_extract})::boolean = {bool_value})"
401
+ string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND ({_payload_text_extract})::boolean = {bool_value})"
402
+ conditions.append(f"({bool_condition} OR {string_condition})")
403
+ else:
404
+ is_numeric = False
405
+ if value.isnumeric():
406
+ is_numeric = True
407
+
408
+ if negative:
409
+ array_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'array' AND NOT (payload::jsonb->'body'->{payload_path} @> '[\"{value}\"]'::jsonb))"
410
+ string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND {_payload_text_extract} != '{value}')"
411
+
412
+ if is_numeric:
413
+ number_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'number' AND ({_payload_text_extract})::float != {value})"
414
+ conditions.append(
415
+ f"({array_condition} OR {string_condition} OR {number_condition})")
416
+ else:
417
+ conditions.append(f"({array_condition} OR {string_condition})")
418
+ else:
419
+ array_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'array' AND payload::jsonb->'body'->{payload_path} @> '[\"{value}\"]'::jsonb)"
420
+ string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND {_payload_text_extract} = '{value}')"
421
+ direct_condition = f"(payload::jsonb->'body'->{payload_path} = '\"{value}\"'::jsonb)"
422
+
423
+ if is_numeric:
424
+ number_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'number' AND ({_payload_text_extract})::float = {value})"
425
+ conditions.append(
426
+ f"({array_condition} OR {string_condition} OR {direct_condition} OR {number_condition})")
427
+ else:
428
+ conditions.append(
429
+ f"({array_condition} OR {string_condition} OR {direct_condition})")
430
+
431
+ if conditions:
432
+ if negative:
433
+ join_operator = " OR " if operation == 'AND' else " AND "
434
+ else:
435
+ join_operator = " AND " if operation == 'AND' else " OR "
436
+ statement = statement.where(text("(" + join_operator.join(conditions) + ")"))
437
+ elif field.startswith('payload.'):
438
+ payload_field = field.replace('payload.', '')
439
+ payload_path = '->'.join([f"'{part}'" for part in payload_field.split('.')])
440
+
441
+ payload_path_splited = payload_path.split('->')
442
+ if len(payload_path_splited) > 1:
443
+ _nested_no_last = '->'.join(payload_path_splited[:-1])
444
+ _last = payload_path_splited[-1]
445
+ _payload_text_extract = f"payload::jsonb->{_nested_no_last}->>{_last}"
446
+ else:
447
+ _payload_text_extract = f"payload::jsonb->>{payload_path}"
448
+
449
+ conditions = []
450
+
451
+ if value_type == 'numeric' and field_data.get('is_range', False) and len(
452
+ field_data.get('range_values', [])) == 2:
453
+ val1, val2 = field_data['range_values']
454
+ try:
455
+ num1 = float(val1)
456
+ num2 = float(val2)
457
+ if num1 > num2:
458
+ val1, val2 = val2, val1
459
+ except ValueError:
460
+ pass
461
+ if negative:
462
+ conditions.append(
463
+ f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'number' AND ({_payload_text_extract})::float NOT BETWEEN {val1} AND {val2})")
464
+ else:
465
+ conditions.append(
466
+ f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'number' AND ({_payload_text_extract})::float BETWEEN {val1} AND {val2})")
467
+
468
+ for value in values:
469
+ if value_type == 'datetime':
470
+ if field_data.get('is_range', False) and len(field_data.get('range_values', [])) == 2:
471
+ range_values = field_data['range_values']
472
+ val1, val2 = range_values
473
+ if is_date_time_value(val1)[0] and is_date_time_value(val2)[0]:
474
+ fmt1 = format_strings.get(val1)
475
+ fmt2 = format_strings.get(val2)
476
+ if fmt1 and fmt2:
477
+ if fmt1 == fmt2:
478
+ if val1 > val2:
479
+ val1, val2 = val2, val1
480
+ else:
481
+ try:
482
+ from datetime import datetime
483
+ dt1 = datetime.strptime(val1, fmt1.replace('YYYY', '%Y').replace('MM',
484
+ '%m').replace(
485
+ 'DD', '%d').replace('"T"HH24', 'T%H').replace('MI', '%M').replace(
486
+ 'SS', '%S').replace('US', '%f'))
487
+ dt2 = datetime.strptime(val2, fmt2.replace('YYYY', '%Y').replace('MM',
488
+ '%m').replace(
489
+ 'DD', '%d').replace('"T"HH24', 'T%H').replace('MI', '%M').replace(
490
+ 'SS', '%S').replace('US', '%f'))
491
+ if dt1 > dt2:
492
+ val1, val2 = val2, val1
493
+ except Exception:
494
+ if val1 > val2:
495
+ val1, val2 = val2, val1
496
+ else:
497
+ if val1 > val2:
498
+ val1, val2 = val2, val1
499
+
500
+ start_value, end_value = val1, val2
501
+ start_format = format_strings.get(start_value)
502
+ end_format = format_strings.get(end_value)
503
+
504
+ if start_format and end_format:
505
+ if negative:
506
+ string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND TO_TIMESTAMP({_payload_text_extract}, '{start_format}') NOT BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}') AND TO_TIMESTAMP('{end_value}', '{end_format}'))"
507
+ fallback_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND ({_payload_text_extract})::text NOT BETWEEN '{start_value}' AND '{end_value}')"
508
+ conditions.append(f"({string_condition} OR {fallback_condition})")
509
+ else:
510
+ string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND TO_TIMESTAMP({_payload_text_extract}, '{start_format}') BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}') AND TO_TIMESTAMP('{end_value}', '{end_format}'))"
511
+ fallback_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND ({_payload_text_extract})::text BETWEEN '{start_value}' AND '{end_value}')"
512
+ conditions.append(f"({string_condition} OR {fallback_condition})")
513
+ else:
514
+ format_string = format_strings.get(value)
515
+ if format_string:
516
+ next_value = get_next_date_value(value, format_string)
517
+
518
+ if negative:
519
+ string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND (TO_TIMESTAMP({_payload_text_extract}, '{format_string}') < TO_TIMESTAMP('{value}', '{format_string}') OR TO_TIMESTAMP({_payload_text_extract}, '{format_string}') >= TO_TIMESTAMP('{next_value}', '{format_string}')))"
520
+ fallback_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND (({_payload_text_extract})::text < '{value}' OR ({_payload_text_extract})::text >= '{next_value}'))"
521
+ conditions.append(f"({string_condition} OR {fallback_condition})")
522
+ else:
523
+ string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND TO_TIMESTAMP({_payload_text_extract}, '{format_string}') >= TO_TIMESTAMP('{value}', '{format_string}') AND TO_TIMESTAMP({_payload_text_extract}, '{format_string}') < TO_TIMESTAMP('{next_value}', '{format_string}'))"
524
+ fallback_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND ({_payload_text_extract})::text >= '{value}' AND ({_payload_text_extract})::text < '{next_value}')"
525
+ conditions.append(f"({string_condition} OR {fallback_condition})")
526
+ else:
527
+ is_numeric = False
528
+ if value.isnumeric():
529
+ is_numeric = True
530
+
531
+ if negative:
532
+ array_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'array' AND NOT (payload::jsonb->{payload_path} @> '[\"{value}\"]'::jsonb))"
533
+ if '*' in value:
534
+ pattern = value.replace('*', '%')
535
+ string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND {_payload_text_extract} NOT ILIKE '{pattern}')"
536
+ else:
537
+ string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND {_payload_text_extract} != '{value}')"
538
+
539
+ if is_numeric:
540
+ number_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'number' AND ({_payload_text_extract})::float != {value})"
541
+ conditions.append(
542
+ f"({array_condition} OR {string_condition} OR {number_condition})")
543
+ else:
544
+ conditions.append(f"({array_condition} OR {string_condition})")
545
+ else:
546
+ array_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'array' AND payload::jsonb->{payload_path} @> '[\"{value}\"]'::jsonb)"
547
+ if '*' in value:
548
+ pattern = value.replace('*', '%')
549
+ string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND {_payload_text_extract} ILIKE '{pattern}')"
550
+ else:
551
+ string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND {_payload_text_extract} = '{value}')"
552
+ direct_condition = f"(payload::jsonb->{payload_path} = '\"{value}\"'::jsonb)"
553
+
554
+ if is_numeric:
555
+ number_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'number' AND ({_payload_text_extract})::float = {value})"
556
+ conditions.append(
557
+ f"({array_condition} OR {string_condition} OR {direct_condition} OR {number_condition})")
558
+ else:
559
+ conditions.append(
560
+ f"({array_condition} OR {string_condition} OR {direct_condition})")
561
+
562
+ if conditions:
563
+ if negative:
564
+ join_operator = " OR " if operation == 'AND' else " AND "
565
+ else:
566
+ join_operator = " AND " if operation == 'AND' else " OR "
567
+ statement = statement.where(text("(" + join_operator.join(conditions) + ")"))
568
+ else:
569
+ try:
570
+ if hasattr(table, field):
571
+ field_obj = getattr(table, field)
572
+ if hasattr(field_obj, 'type') and str(field_obj.type).lower() == 'jsonb':
573
+ conditions = []
574
+ for value in values:
575
+ if negative:
576
+ array_condition = f"(jsonb_typeof({field}) = 'array' AND NOT ({field} @> '[\"{value}\"]'::jsonb))"
577
+ object_condition = f"(jsonb_typeof({field}) = 'object' AND NOT ({field}::text ILIKE '%{value}%'))"
578
+ conditions.append(f"({array_condition} OR {object_condition})")
579
+ else:
580
+ array_condition = f"(jsonb_typeof({field}) = 'array' AND {field} @> '[\"{value}\"]'::jsonb)"
581
+ object_condition = f"(jsonb_typeof({field}) = 'object' AND {field}::text ILIKE '%{value}%')"
582
+ conditions.append(f"({array_condition} OR {object_condition})")
583
+
584
+ if conditions:
585
+ if negative:
586
+ join_operator = " OR " if operation == 'AND' else " AND "
587
+ else:
588
+ join_operator = " AND " if operation == 'AND' else " OR "
589
+ statement = statement.where(text("(" + join_operator.join(conditions) + ")"))
590
+ elif value_type == 'datetime':
591
+ conditions = []
592
+
593
+ if field_data.get('is_range', False) and len(field_data.get('range_values', [])) == 2:
594
+ range_values = field_data['range_values']
595
+ val1, val2 = range_values
596
+ if is_date_time_value(val1)[0] and is_date_time_value(val2)[0]:
597
+ fmt1 = format_strings.get(val1)
598
+ fmt2 = format_strings.get(val2)
599
+ if fmt1 and fmt2:
600
+ if fmt1 == fmt2:
601
+ if val1 > val2:
602
+ val1, val2 = val2, val1
603
+ else:
604
+ try:
605
+ from datetime import datetime
606
+ dt1 = datetime.strptime(val1,
607
+ fmt1.replace('YYYY', '%Y').replace('MM',
608
+ '%m').replace(
609
+ 'DD', '%d').replace('"T"HH24',
610
+ 'T%H').replace('MI',
611
+ '%M').replace(
612
+ 'SS', '%S').replace('US', '%f'))
613
+ dt2 = datetime.strptime(val2,
614
+ fmt2.replace('YYYY', '%Y').replace('MM',
615
+ '%m').replace(
616
+ 'DD', '%d').replace('"T"HH24',
617
+ 'T%H').replace('MI',
618
+ '%M').replace(
619
+ 'SS', '%S').replace('US', '%f'))
620
+ if dt1 > dt2:
621
+ val1, val2 = val2, val1
622
+ except Exception:
623
+ if val1 > val2:
624
+ val1, val2 = val2, val1
625
+ else:
626
+ if val1 > val2:
627
+ val1, val2 = val2, val1
628
+
629
+ start_value, end_value = val1, val2
630
+ start_format = format_strings.get(start_value)
631
+ end_format = format_strings.get(end_value)
632
+
633
+ if start_format and end_format:
634
+ if negative:
635
+ conditions.append(
636
+ f"({field}::timestamp NOT BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}')::timestamp AND TO_TIMESTAMP('{end_value}', '{end_format}')::timestamp)")
637
+ else:
638
+ conditions.append(
639
+ f"({field}::timestamp BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}')::timestamp AND TO_TIMESTAMP('{end_value}', '{end_format}')::timestamp)")
640
+ else:
641
+ for value in values:
642
+ format_string = format_strings.get(value)
643
+ if format_string:
644
+ next_value = get_next_date_value(value, format_string)
645
+
646
+ if negative:
647
+ conditions.append(
648
+ f"({field}::timestamp < TO_TIMESTAMP('{value}', '{format_string}')::timestamp OR {field}::timestamp >= TO_TIMESTAMP('{next_value}', '{format_string}')::timestamp)")
649
+ else:
650
+ conditions.append(
651
+ f"({field}::timestamp >= TO_TIMESTAMP('{value}', '{format_string}')::timestamp AND {field}::timestamp < TO_TIMESTAMP('{next_value}', '{format_string}')::timestamp)")
652
+
653
+ if conditions:
654
+ if negative:
655
+ join_operator = " OR " if operation == 'AND' else " AND "
656
+ else:
657
+ join_operator = " AND " if operation == 'AND' else " OR "
658
+ statement = statement.where(text("(" + join_operator.join(conditions) + ")"))
659
+ elif value_type == 'numeric':
660
+ conditions = []
661
+
662
+ if field_data.get('is_range', False) and len(field_data.get('range_values', [])) == 2:
663
+ range_values = field_data['range_values']
664
+ val1, val2 = range_values
665
+ try:
666
+ num1 = float(val1)
667
+ num2 = float(val2)
668
+ if num1 > num2:
669
+ val1, val2 = val2, val1
670
+ except ValueError:
671
+ pass
672
+
673
+ if negative:
674
+ conditions.append(f"(CAST({field} AS FLOAT) NOT BETWEEN {val1} AND {val2})")
675
+ else:
676
+ conditions.append(f"(CAST({field} AS FLOAT) BETWEEN {val1} AND {val2})")
677
+ else:
678
+ for value in values:
679
+ if negative:
680
+ conditions.append(f"(CAST({field} AS FLOAT) != {value})")
681
+ else:
682
+ conditions.append(f"(CAST({field} AS FLOAT) = {value})")
683
+
684
+ if conditions:
685
+ if negative:
686
+ join_operator = " OR " if operation == 'AND' else " AND "
687
+ else:
688
+ join_operator = " AND " if operation == 'AND' else " OR "
689
+
690
+ statement = statement.where(text("(" + join_operator.join(conditions) + ")"))
691
+ elif value_type == 'boolean':
692
+ conditions = []
693
+ for value in values:
694
+ bool_value = value.lower()
695
+ if negative:
696
+ conditions.append(f"(CAST({field} AS BOOLEAN) != {bool_value})")
697
+ else:
698
+ conditions.append(f"(CAST({field} AS BOOLEAN) = {bool_value})")
699
+
700
+ if conditions:
701
+ if negative:
702
+ join_operator = " OR " if operation == 'AND' else " AND "
703
+ else:
704
+ join_operator = " AND " if operation == 'AND' else " OR "
705
+ statement = statement.where(text(join_operator.join(conditions)))
706
+ else:
707
+ field_obj = getattr(table, field)
708
+ is_timestamp = hasattr(field_obj, 'type') and str(field_obj.type).lower().startswith(
709
+ 'timestamp')
710
+
711
+ if is_timestamp:
712
+ conditions = []
713
+ for value in values:
714
+ if negative:
715
+ conditions.append(f"{field}::text != '{value}'")
716
+ else:
717
+ conditions.append(f"{field}::text = '{value}'")
718
+
719
+ join_operator = " AND " if operation == 'AND' else " OR "
720
+ statement = statement.where(text("(" + join_operator.join(conditions) + ")"))
721
+ else:
722
+ conditions = []
723
+ for value in values:
724
+ if '*' in value:
725
+ pattern = value.replace('*', '%')
726
+ if negative:
727
+ conditions.append(f"{field} NOT ILIKE '{pattern}'")
728
+ else:
729
+ conditions.append(f"{field} ILIKE '{pattern}'")
730
+ else:
731
+ if negative:
732
+ conditions.append(f"{field} != '{value}'")
733
+ else:
734
+ conditions.append(f"{field} = '{value}'")
735
+ if negative:
736
+ join_operator = ' AND '
737
+ else:
738
+ join_operator = ' AND ' if operation == 'AND' else ' OR '
739
+ statement = statement.where(text('(' + join_operator.join(conditions) + ')'))
740
+ else:
741
+ conditions = []
742
+ for value in values:
743
+ if value_type == 'datetime':
744
+
745
+ if field_data.get('is_range', False) and len(
746
+ field_data.get('range_values', [])) == 2:
747
+ range_values = field_data['range_values']
748
+ val1, val2 = range_values
749
+ if is_date_time_value(val1)[0] and is_date_time_value(val2)[0]:
750
+ fmt1 = format_strings.get(val1)
751
+ fmt2 = format_strings.get(val2)
752
+ if fmt1 and fmt2:
753
+ if fmt1 == fmt2:
754
+ if val1 > val2:
755
+ val1, val2 = val2, val1
756
+ else:
757
+ try:
758
+ from datetime import datetime
759
+ dt1 = datetime.strptime(val1,
760
+ fmt1.replace('YYYY', '%Y').replace('MM',
761
+ '%m').replace(
762
+ 'DD', '%d').replace('"T"HH24',
763
+ 'T%H').replace(
764
+ 'MI', '%M').replace('SS',
765
+ '%S').replace(
766
+ 'US', '%f'))
767
+ dt2 = datetime.strptime(val2,
768
+ fmt2.replace('YYYY', '%Y').replace('MM',
769
+ '%m').replace(
770
+ 'DD', '%d').replace('"T"HH24',
771
+ 'T%H').replace(
772
+ 'MI', '%M').replace('SS',
773
+ '%S').replace(
774
+ 'US', '%f'))
775
+ if dt1 > dt2:
776
+ val1, val2 = val2, val1
777
+ except Exception:
778
+ if val1 > val2:
779
+ val1, val2 = val2, val1
780
+ else:
781
+ if val1 > val2:
782
+ val1, val2 = val2, val1
783
+
784
+ start_value, end_value = val1, val2
785
+ start_format = format_strings.get(start_value)
786
+ end_format = format_strings.get(end_value)
787
+
788
+ if start_format and end_format:
789
+ if negative:
790
+ conditions.append(
791
+ f"(payload::jsonb->'{field}'::timestamp NOT BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}')::timestamp AND TO_TIMESTAMP('{end_value}', '{end_format}')::timestamp)")
792
+ else:
793
+ conditions.append(
794
+ f"(payload::jsonb->'{field}'::timestamp BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}')::timestamp AND TO_TIMESTAMP('{end_value}', '{end_format}')::timestamp)")
795
+ else:
796
+ format_string = format_strings.get(value)
797
+ if format_string:
798
+ next_value = get_next_date_value(value, format_string)
799
+
800
+ if negative:
801
+ conditions.append(
802
+ f"(payload::jsonb->'{field}'::timestamp < TO_TIMESTAMP('{value}', '{format_string}')::timestamp OR payload::jsonb->'{field}'::timestamp >= TO_TIMESTAMP('{next_value}', '{format_string}')::timestamp)")
803
+ else:
804
+ conditions.append(
805
+ f"(payload::jsonb->'{field}'::timestamp >= TO_TIMESTAMP('{value}', '{format_string}')::timestamp AND payload::jsonb->'{field}'::timestamp < TO_TIMESTAMP('{next_value}', '{format_string}')::timestamp)")
806
+ elif value_type == 'numeric':
807
+ if field_data.get('is_range', False) and len(
808
+ field_data.get('range_values', [])) == 2:
809
+ range_values = field_data['range_values']
810
+ val1, val2 = range_values
811
+ try:
812
+ num1 = float(val1)
813
+ num2 = float(val2)
814
+ if num1 > num2:
815
+ val1, val2 = val2, val1
816
+ except ValueError:
817
+ pass
818
+
819
+ if negative:
820
+ conditions.append(
821
+ f"(jsonb_typeof(payload::jsonb->'{field}') = 'number' AND (payload::jsonb->'{field}')::float NOT BETWEEN {val1} AND {val2})")
822
+ else:
823
+ conditions.append(
824
+ f"(jsonb_typeof(payload::jsonb->'{field}') = 'number' AND (payload::jsonb->'{field}')::float BETWEEN {val1} AND {val2})")
825
+ elif value_type == 'boolean':
826
+ bool_value = value.lower()
827
+ if negative:
828
+ conditions.append(
829
+ f"(jsonb_typeof(payload::jsonb->'{field}') = 'boolean' AND (payload::jsonb->'{field}')::boolean != {bool_value})")
830
+ else:
831
+ conditions.append(
832
+ f"(jsonb_typeof(payload::jsonb->'{field}') = 'boolean' AND (payload::jsonb->'{field}')::boolean = {bool_value})")
833
+ else:
834
+ if '*' in value:
835
+ pattern = value.replace('*', '%')
836
+ if negative:
837
+ conditions.append(f"(payload::jsonb->>'{field}') NOT ILIKE '{pattern}'")
838
+ else:
839
+ conditions.append(f"(payload::jsonb->>'{field}') ILIKE '{pattern}'")
840
+ else:
841
+ if negative:
842
+ conditions.append(f"payload::jsonb->'{field}' != '\"{value}\"'::jsonb")
843
+ else:
844
+ conditions.append(f"payload::jsonb->'{field}' = '\"{value}\"'::jsonb")
845
+
846
+ if conditions:
847
+ if negative:
848
+ join_operator = " OR " if operation == 'AND' else " AND "
849
+ else:
850
+ join_operator = " AND " if operation == 'AND' else " OR "
851
+ statement = statement.where(text("(" + join_operator.join(conditions) + ")"))
852
+ except Exception as e:
853
+ print(f"Error handling field {field}: {e}")
854
+
855
+ if query.filter_schema_names:
856
+ if 'meta' in query.filter_schema_names:
857
+ query.filter_schema_names.remove('meta')
858
+ if query.filter_schema_names:
859
+ statement = statement.where(
860
+ text("(payload ->> 'schema_shortname') IN ({})".format(
861
+ ', '.join(f"'{item}'" for item in query.filter_schema_names)
862
+ ))
863
+ )
864
+ if query.filter_shortnames:
865
+ statement = statement.where(
866
+ col(table.shortname).in_(query.filter_shortnames)
867
+ )
868
+ if query.filter_types:
869
+ statement = statement.where(
870
+ col(table.resource_type).in_(query.filter_types)
871
+ )
872
+ if query.filter_tags:
873
+ statement = statement.where(
874
+ col(table.tags).in_(query.filter_tags)
875
+ )
876
+ if query.from_date:
877
+ statement = statement.where(table.created_at >= query.from_date)
878
+ if query.to_date:
879
+ statement = statement.where(table.created_at <= query.to_date)
880
+
881
+ try:
882
+ if not is_for_count:
883
+ if query.sort_by:
884
+ if query.sort_by.startswith('attributes.'):
885
+ query.sort_by = query.sort_by[11:]
886
+ if "." in query.sort_by:
887
+ # Normalize JSON path for sorting as well (handle leading '@' and body.* shortcut)
888
+ sort_expression = transform_keys_to_sql(
889
+ query.sort_by.replace("@", "", 1) if query.sort_by.startswith("@") else (
890
+ f"payload.{query.sort_by}" if query.sort_by.startswith("body.") else query.sort_by))
891
+ sort_type = " DESC" if query.sort_type == SortType.descending else ""
892
+ sort_expression = f"CASE WHEN ({sort_expression}) ~ '^[0-9]+$' THEN ({sort_expression})::float END {sort_type}, ({sort_expression}) {sort_type}"
893
+ statement = statement.order_by(text(sort_expression))
894
+ else:
895
+ if query.sort_type == SortType.ascending:
896
+ statement = statement.order_by(getattr(table, query.sort_by))
897
+ if query.sort_type == SortType.descending:
898
+ statement = statement.order_by(getattr(table, query.sort_by).desc())
899
+
900
+ except Exception as e:
901
+ print("[!set_sql_statement_from_query]", e)
902
+
903
+ if not is_for_count:
904
+ if query.offset:
905
+ statement = statement.offset(query.offset)
906
+
907
+ statement = statement.limit(query.limit)
908
+
909
+ if query.type == QueryType.tags and not is_for_count and hasattr(table, 'tags'):
910
+ if query.retrieve_json_payload:
911
+ statement = select(
912
+ func.jsonb_array_elements_text(col(table.tags)).label('tag'),
913
+ func.count('*').label('count')
914
+ ).where(col(table.uuid).in_(
915
+ select(col(table.uuid)).where(statement.whereclause) # type: ignore
916
+ )).group_by('tag')
917
+ else:
918
+ statement = select(
919
+ func.jsonb_array_elements_text(col(table.tags)).label('tag')
920
+ ).where(col(table.uuid).in_(
921
+ select(col(table.uuid)).where(statement.whereclause) # type: ignore
922
+ )).distinct()
923
+
924
+ return statement
925
+
926
+
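A hedged end-to-end sketch of how a statement flows through the builder above. The api.Query field names are taken from the attributes this function reads; the constructor signature and the defaults of unset fields are assumptions:

    # Hypothetical: list entries of space "demo" under /posts, newest first.
    query = api.Query(
        type=QueryType.subpath,
        space_name="demo",
        subpath="/posts",
        sort_by="created_at",
        sort_type=SortType.descending,
        limit=10,
    )
    statement = await set_sql_statement_from_query(Entries, select(Entries), query, is_for_count=False)
    # Adds roughly: WHERE space_name = 'demo'
    #               AND (subpath = '/posts' OR subpath ILIKE '/posts/%')
    #               ORDER BY created_at DESC LIMIT 10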
927
+ class SQLAdapter(BaseDataAdapter):
928
+ _engine = None
929
+ _async_session_factory = None
930
+ session: Session
931
+ async_session: sessionmaker
932
+ engine: Any
933
+
934
+ def locators_query(self, query: api.Query) -> tuple[int, list[core.Locator]]:
935
+ locators: list[core.Locator] = []
936
+ total: int = 0
937
+ match query.type:
938
+ case api.QueryType.subpath:
939
+ pass
940
+ # !TODO finish...
941
+ return total, locators
942
+
943
+ def folder_path(
944
+ self,
945
+ space_name: str,
946
+ subpath: str,
947
+ shortname: str,
948
+ ) -> str:
949
+ return ""
950
+
951
+ async def otp_created_since(self, key: str) -> int | None:
952
+ async with self.get_session() as session:
953
+ result = await session.execute(select(OTP).where(OTP.key == key))
954
+ otp_entry = result.scalar_one_or_none()
955
+
956
+ if otp_entry:
957
+ return int((datetime.now() - otp_entry.timestamp).total_seconds())
958
+
959
+ return None
960
+
961
+ async def save_otp(
962
+ self,
963
+ key: str,
964
+ otp: str,
965
+ ):
966
+ try:
967
+ async with self.get_session() as session:
968
+ otp_entry = OTP(
969
+ key=key,
970
+ value={"otp": otp},
971
+ timestamp=datetime.now()
972
+ )
973
+ session.add(otp_entry)
974
+ except Exception as e:
975
+ async with self.get_session() as session:
976
+ if "UniqueViolationError" in str(e) or "unique constraint" in str(e).lower():
977
+ await session.rollback()
978
+ statement = delete(OTP).where(col(OTP.key) == key)
979
+ await session.execute(statement)
980
+
981
+ otp_entry = OTP(
982
+ key=key,
983
+ value={"otp": otp},
984
+ timestamp=datetime.now()
985
+ )
986
+ session.add(otp_entry)
987
+ else:
988
+ await session.rollback()
989
+ raise e
990
+
991
+ async def get_otp(
992
+ self,
993
+ key: str,
994
+ ):
995
+ async with self.get_session() as session:
996
+ result = await session.execute(select(OTP).where(OTP.key == key))
997
+ otp_entry = result.scalar_one_or_none()
998
+
999
+ if otp_entry:
1000
+ if (datetime.now() - otp_entry.timestamp).total_seconds() > settings.otp_token_ttl:
1001
+ await session.delete(otp_entry)
1002
+ return None
1003
+ return otp_entry.value.get("otp")
1004
+ return None
1005
+
1006
+ async def delete_otp(self, key: str):
1007
+ async with self.get_session() as session:
1008
+ statement = delete(OTP).where(col(OTP.key) == key)
1009
+ await session.execute(statement)
1010
+
1011
+ def metapath(self,
1012
+ space_name: str,
1013
+ subpath: str,
1014
+ shortname: str,
1015
+ class_type: Type[MetaChild],
1016
+ schema_shortname: str | None = None,
1017
+ ) -> tuple[Path, str]:
1018
+ return (Path(), "")
1019
+
1020
+ def __init__(self):
1021
+ if SQLAdapter._engine is None:
1022
+ SQLAdapter._engine = create_async_engine(
1023
+ URL.create(
1024
+ drivername=settings.database_driver,
1025
+ host=settings.database_host,
1026
+ port=settings.database_port,
1027
+ username=settings.database_username,
1028
+ password=settings.database_password,
1029
+ database=settings.database_name,
1030
+ ),
1031
+ echo=False,
1032
+ pool_pre_ping=True,
1033
+ pool_size=settings.database_pool_size,
1034
+ max_overflow=settings.database_max_overflow,
1035
+ pool_timeout=settings.database_pool_timeout,
1036
+ pool_recycle=settings.database_pool_recycle,
1037
+ )
1038
+ self.engine = SQLAdapter._engine
1039
+ try:
1040
+ if SQLAdapter._async_session_factory is None:
1041
+ SQLAdapter._async_session_factory = sessionmaker(
1042
+ self.engine, class_=AsyncSession, expire_on_commit=False
1043
+ ) # type: ignore
1044
+ self.async_session = SQLAdapter._async_session_factory
1045
+ except Exception as e:
1046
+ print("[!FATAL]", e)
1047
+ sys.exit(127)
1048
+
1049
+ async def test_connection(self):
1050
+ try:
1051
+ async with self.get_session() as session:
1052
+ (await session.execute(text("SELECT 1"))).one_or_none()
1053
+ except Exception as e:
1054
+ print("[!FATAL]", e)
1055
+ sys.exit(127)
1056
+
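A hedged startup sketch combining the constructor and test_connection above (a top-level await context, e.g. an async entry point, is assumed):

    # Hypothetical startup sketch: one shared async engine/session factory per process.
    adapter = SQLAdapter()            # builds the engine from utils.settings on first instantiation
    await adapter.test_connection()   # runs SELECT 1; exits the process with code 127 on failure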
1057
+ @asynccontextmanager
1058
+ async def get_session(self):
1059
+ async_session = self.async_session()
1060
+ try:
1061
+ yield async_session
1062
+ await async_session.commit()
1063
+ finally:
1064
+ await async_session.close() # type: ignore
1065
+
1066
+ def get_table(
1067
+ self, class_type: Type[MetaChild]
1068
+ ) -> Type[Roles] | Type[Permissions] | Type[Users] | Type[Spaces] | Type[Locks] | Type[Attachments] | Type[Entries]:
1069
+
1070
+ match class_type:
1071
+ case core.Role:
1072
+ return Roles
1073
+ case core.Permission:
1074
+ return Permissions
1075
+ case core.User:
1076
+ return Users
1077
+ case core.Space:
1078
+ return Spaces
1079
+ case core.Lock:
1080
+ return Locks
1081
+ case (
1082
+ core.Alteration
1083
+ | core.Media
1084
+ | core.Lock
1085
+ | core.Comment
1086
+ | core.Reply
1087
+ | core.Reaction
1088
+ | core.Json
1089
+ | core.DataAsset
1090
+ ):
1091
+ return Attachments
1092
+ return Entries
1093
+
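A quick hedged illustration of the class-to-table mapping above (core.Content is used as a stand-in for any Meta subclass without a dedicated table and is an assumption here):

    # Hypothetical checks of get_table's mapping:
    adapter = SQLAdapter()
    adapter.get_table(core.User)     # -> Users
    adapter.get_table(core.Comment)  # -> Attachments (attachment-like resource types)
    adapter.get_table(core.Content)  # -> Entries     (fallback for everything else)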
1094
+ def get_base_model(self, class_type: Type[MetaChild], data,
1095
+ update=None) -> Roles | Permissions | Users | Spaces | Locks | Attachments | Entries:
1096
+ match class_type:
1097
+ case core.User:
1098
+ return Users.model_validate(data, update=update)
1099
+ case core.Role:
1100
+ return Roles.model_validate(data, update=update)
1101
+ case core.Permission:
1102
+ return Permissions.model_validate(data, update=update)
1103
+ case core.Space:
1104
+ return Spaces.model_validate(data, update=update)
1105
+ case (
1106
+ core.Alteration
1107
+ | core.Media
1108
+ | core.Lock
1109
+ | core.Comment
1110
+ | core.Reply
1111
+ | core.Reaction
1112
+ | core.Json
1113
+ | core.DataAsset
1114
+ ):
1115
+ if data.get("media", None) is None:
1116
+ data["media"] = None
1117
+ return Attachments.model_validate(data, update=update)
1118
+ return Entries.model_validate(data, update=update)
1119
+
1120
+ async def get_entry_attachments(
1121
+ self,
1122
+ subpath: str,
1123
+ attachments_path: Path,
1124
+ filter_types: list | None = None,
1125
+ include_fields: list | None = None,
1126
+ filter_shortnames: list | None = None,
1127
+ retrieve_json_payload: bool = False,
1128
+ ) -> dict:
1129
+ attachments_dict: dict[str, list] = {}
1130
+ async with self.get_session() as session:
1131
+ if not subpath.startswith("/"):
1132
+ subpath = f"/{subpath}"
1133
+
1134
+ if str(settings.spaces_folder) in str(attachments_path):
1135
+ attachments_path = attachments_path.relative_to(settings.spaces_folder)
1136
+ space_name = attachments_path.parts[0]
1137
+ shortname = attachments_path.parts[-1]
1138
+ statement = (
1139
+ select(Attachments)
1140
+ .where(Attachments.space_name == space_name)
1141
+ .where(Attachments.subpath == f"{subpath}/{shortname}".replace('//', '/'))
1142
+ )
1143
+ results = list((await session.execute(statement)).all())
1144
+
1145
+ if len(results) == 0:
1146
+ return attachments_dict
1147
+
1148
+ for idx, item in enumerate(results):
1149
+ item = item[0]
1150
+ attachment_record = Attachments.model_validate(item)
1151
+ attachment_json = attachment_record.model_dump()
1152
+ attachment = {
1153
+ "resource_type": attachment_json["resource_type"],
1154
+ "uuid": attachment_json["uuid"],
1155
+ "shortname": attachment_json["shortname"],
1156
+ "subpath": "/".join(attachment_json["subpath"].split("/")[:-1]) # join(),
1157
+ }
1158
+ del attachment_json["resource_type"]
1159
+ del attachment_json["uuid"]
1160
+ del attachment_json["media"]
1161
+ del attachment_json["shortname"]
1162
+ del attachment_json["subpath"]
1163
+ del attachment_json["relationships"]
1164
+ del attachment_json["acl"]
1165
+ del attachment_json["space_name"]
1166
+ attachment["attributes"] = {**attachment_json}
1167
+ if attachment_record.resource_type in attachments_dict:
1168
+ attachments_dict[attachment_record.resource_type].append(attachment)
1169
+ else:
1170
+ attachments_dict[attachment_record.resource_type] = [attachment]
1171
+
1172
+ return attachments_dict
1173
+
1174
+ def payload_path(
1175
+ self,
1176
+ space_name: str,
1177
+ subpath: str,
1178
+ class_type: Type[MetaChild],
1179
+ schema_shortname: str | None = None, ) -> Path:
1180
+ """Construct the full path of the meta file"""
1181
+ path = settings.spaces_folder / space_name
1182
+
1183
+ subpath = copy(subpath)
1184
+ if subpath[0] == "/":
1185
+ subpath = f".{subpath}"
1186
+ if issubclass(class_type, core.Attachment):
1187
+ [parent_subpath, parent_name] = subpath.rsplit("/", 1)
1188
+ # schema_shortname = (
1189
+ # "." + dto.schema_shortname if dto.schema_shortname != "meta" else ""
1190
+ # )
1191
+ schema_shortname = ""
1192
+ attachment_folder = f"{parent_name}/attachments{schema_shortname}.{class_type.__name__.lower()}"
1193
+ path = path / parent_subpath / ".dm" / attachment_folder
1194
+ else:
1195
+ path = path / subpath
1196
+ return path
1197
+
1198
+ async def db_load_or_none(
1199
+ self,
1200
+ space_name: str,
1201
+ subpath: str,
1202
+ shortname: str,
1203
+ class_type: Type[MetaChild],
1204
+ user_shortname: str | None = None,
1205
+ schema_shortname: str | None = None,
1206
+ ) -> Attachments | Entries | Locks | Permissions | Roles | Spaces | Users | None:
1207
+ """Load a Meta Json according to the reuqested Class type"""
1208
+ if not subpath.startswith("/"):
1209
+ subpath = f"/{subpath}"
1210
+
1211
+ shortname = shortname.replace("/", "")
1212
+
1213
+ table = self.get_table(class_type)
1214
+
1215
+ if table is Attachments:
1216
+ statement = select(table).options(defer(Attachments.media)) # type: ignore
1217
+ else:
1218
+ statement = select(table)
1219
+ statement = statement.where(col(table.space_name) == space_name).where(table.shortname == shortname)
1220
+
1221
+ if table in [Entries, Attachments]:
1222
+ statement = statement.where(col(table.subpath) == subpath)
1223
+
1224
+ try:
1225
+ async with self.get_session() as session:
1226
+ return (await session.execute(statement)).scalars().one_or_none() # type: ignore
1227
+ except Exception as e:
1228
+ print("[!load_or_none]", e)
1229
+ logger.error(f"Failed parsing an entry. Error: {e}")
1230
+ return None
1231
+
1232
+ async def get_entry_by_criteria(self, criteria: dict, table: Any = None) -> core.Record | None:
1233
+ async with self.get_session() as session:
1234
+ if table is None:
1235
+ tables = [Entries, Users, Roles, Permissions, Spaces, Attachments]
1236
+ for _table in tables:
1237
+ statement = select(_table)
1238
+ for k, v in criteria.items():
1239
+ # Prefer SQLAlchemy column expressions over raw text to avoid injection
1240
+ if hasattr(_table, k):
1241
+ column = getattr(_table, k)
1242
+ if isinstance(v, str):
1243
+ statement = statement.where(cast(column, String).like(bindparam(k)))
1244
+ statement = statement.params(**{k: f"{v}%"})
1245
+ else:
1246
+ statement = statement.where(column == bindparam(k))
1247
+ statement = statement.params(**{k: v})
1248
+ else:
1249
+ # Unknown column name; skip to avoid potential SQL injection via dynamic identifiers
1250
+ continue
1251
+
1252
+ _result = (await session.execute(statement)).scalars().first()
1253
+
1254
+ if _result is None:
1255
+ continue
1256
+
1257
+ core_model_class_1: core.Meta = getattr(sys.modules["models.core"],
1258
+ camel_case(_result.resource_type))
1259
+ result = core_model_class_1.model_validate(
1260
+ _result.model_dump()
1261
+ ).to_record(_result.subpath, _result.shortname)
1262
+
1263
+ result.attributes = {**result.attributes, "space_name": _result.space_name}
1264
+
1265
+ return result
1266
+ return None
1267
+ else:
1268
+ statement = select(table)
1269
+ for k, v in criteria.items():
1270
+ if hasattr(table, k):
1271
+ column = getattr(table, k)
1272
+ if isinstance(v, str):
1273
+ statement = statement.where(cast(column, String) == bindparam(k))
1274
+ statement = statement.params(**{k: v})
1275
+ else:
1276
+ statement = statement.where(column == bindparam(k))
1277
+ statement = statement.params(**{k: v})
1278
+ else:
1279
+ # Unknown column name; skip
1280
+ continue
1281
+
1282
+ _result = (await session.execute(statement)).scalars().first()
1283
+
1284
+ if _result is None:
1285
+ return None
1286
+
1287
+ core_model_class_2: core.Meta = getattr(sys.modules["models.core"],
1288
+ camel_case(_result.resource_type))
1289
+
1290
+ result = core_model_class_2.model_validate(
1291
+ _result.model_dump()
1292
+ ).to_record(_result.subpath, _result.shortname)
1293
+ result.attributes = {**result.attributes, "space_name": _result.space_name}
1294
+
1295
+ return result
1296
+
1297
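+ # Returns the most recent Histories row for an entry (ordered by timestamp descending), or None.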
+ async def get_latest_history(
1298
+ self,
1299
+ space_name: str,
1300
+ subpath: str,
1301
+ shortname: str,
1302
+ ) -> Histories | None:
1303
+ async with self.get_session() as session:
1304
+ try:
1305
+ statement = select(Histories).where(
1306
+ col(Histories.space_name) == space_name,
1307
+ col(Histories.subpath) == subpath,
1308
+ col(Histories.shortname) == shortname
1309
+ ).order_by(Histories.timestamp.desc()).limit(1) # type: ignore
1310
+ result = await session.execute(statement)
1311
+ return result.scalars().first() # type: ignore
1312
+ except Exception as _: # type: ignore
1313
+ return None
1314
+
1315
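+ # Main query entry point: resolves the caller's query policies and permission
+ # filter_fields_values, folds them into the search string, builds the SQL statement per
+ # QueryType (subpath, spaces, tags, events, counters, aggregation, attachments, ...),
+ # applies ACL/query-policy filtering, and returns (total, records).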
+ async def query(
1316
+ self, query: api.Query, user_shortname: str | None = None
1317
+ ) -> Tuple[int, list[core.Record]]:
1318
+ total: int
1319
+ results: list
1320
+
1321
+ if not query.subpath.startswith("/"):
1322
+ query.subpath = f"/{query.subpath}"
1323
+ if query.subpath == "//":
1324
+ query.subpath = "/"
1325
+
1326
+ user_shortname = user_shortname if user_shortname else "anonymous"
1327
+ if user_shortname == "anonymous" and query.type in [QueryType.history, QueryType.events]:
1328
+ raise api.Exception(
1329
+ status.HTTP_401_UNAUTHORIZED,
1330
+ api.Error(
1331
+ type="request",
1332
+ code=InternalErrorCode.NOT_ALLOWED,
1333
+ message="You don't have permission to this action",
1334
+ ),
1335
+ )
1336
+ user_query_policies = await get_user_query_policies(
1337
+ self, user_shortname, query.space_name, query.subpath, query.type == QueryType.spaces
1338
+ )
1339
+ if not query.exact_subpath:
1340
+ r = await get_user_query_policies(
1341
+ self, user_shortname, query.space_name, f'{query.subpath}/%'.replace('//', '/'),
1342
+ query.type == QueryType.spaces
1343
+ )
1344
+ user_query_policies.extend(r)
1345
+
1346
+ if len(user_query_policies) == 0:
1347
+ return 0, []
1348
+
1349
+ if query.type in [QueryType.attachments, QueryType.attachments_aggregation]:
1350
+ table = Attachments
1351
+ statement = select(table).options(defer(table.media)) # type: ignore
1352
+ else:
1353
+ table = set_table_for_query(query)
1354
+ statement = select(table)
1355
+
1356
+ user_permissions = await self.get_user_permissions(user_shortname)
1357
+ filtered_policies = []
1358
+
1359
+ _subpath_target_permissions = '/' if query.subpath == '/' else query.subpath.removeprefix('/')
1360
+ if query.filter_types:
1361
+ for ft in query.filter_types:
1362
+ target_permissions = f'{query.space_name}:{_subpath_target_permissions}:{ft}'
1363
+ filtered_policies = [policy for policy in user_query_policies if
1364
+ policy.startswith(target_permissions)]
1365
+ else:
1366
+ target_permissions = f'{query.space_name}:{_subpath_target_permissions}'
1367
+ filtered_policies = [policy for policy in user_query_policies if policy.startswith(target_permissions)]
1368
+
1369
+ ffv_spaces, ffv_subpath, ffv_resource_type, ffv_query = [], [], [], []
1370
+ for user_query_policy in filtered_policies:
1371
+ for perm_key in user_permissions.keys():
1372
+ if user_query_policy.startswith(perm_key):
1373
+ if ffv := user_permissions[perm_key]['filter_fields_values']:
1374
+ if ffv not in ffv_query:
1375
+ ffv_query.append(ffv)
1376
+ perm_key_splited = perm_key.split(':')
1377
+ ffv_spaces.append(perm_key_splited[0])
1378
+ ffv_subpath.append(perm_key_splited[1])
1379
+ ffv_resource_type.append(perm_key_splited[2])
1380
+
1381
+ if len(ffv_spaces):
1382
+ perm_key_splited_query = f"@space_name:{'|'.join(ffv_spaces)} @subpath:/{'|/'.join(ffv_subpath)} @resource_type:{'|'.join(ffv_resource_type)} {' '.join(ffv_query)}"
1383
+ if query.search:
1384
+ query.search += f' {perm_key_splited_query}'
1385
+ else:
1386
+ query.search = perm_key_splited_query
1387
+ if query.search:
1388
+ parts = [p for p in query.search.split(' ') if p]
1389
+ seen = set()
1390
+ deduped_parts = []
1391
+ for p in parts:
1392
+ if p not in seen:
1393
+ seen.add(p)
1394
+ deduped_parts.append(p)
1395
+ query.search = ' '.join(deduped_parts)
1396
+ statement_total = select(func.count(col(table.uuid)))
1397
+
1398
+ if query and query.type == QueryType.events:
1399
+ try:
1400
+ return await events_query(query, user_shortname)
1401
+ except Exception as e:
1402
+ print(e)
1403
+ return 0, []
1404
+
1405
+ if query and query.type == QueryType.tags:
1406
+ try:
1407
+ statement = await set_sql_statement_from_query(table, statement, query, False)
1408
+ statement_total = await set_sql_statement_from_query(table, statement_total, query, True)
1409
+ async with self.get_session() as session:
1410
+ results = list((await session.execute(statement)).all())
1411
+ if len(results) == 0:
1412
+ return 0, []
1413
+
1414
+ tags = []
1415
+ tag_counts = {}
1416
+ if query.retrieve_json_payload:
1417
+ for result in results:
1418
+ if result and len(result) > 1 and result[0]:
1419
+ tag = result[0]
1420
+ count = result[1]
1421
+ tags.append(tag)
1422
+ tag_counts[tag] = count
1423
+ else:
1424
+ for result in results:
1425
+ if result and len(result) > 0 and result[0]:
1426
+ tags.append(result[0])
1427
+ async with self.get_session() as session:
1428
+ _total = (await session.execute(statement_total)).one()
1429
+ total = int(_total[0])
1430
+
1431
+ attributes = {"tags": tags}
1432
+ if query.retrieve_json_payload and tag_counts:
1433
+ attributes["tag_counts"] = tag_counts # type: ignore
1434
+
1435
+ return total, [core.Record(
1436
+ resource_type=core.ResourceType.content,
1437
+ shortname="tags",
1438
+ subpath=query.subpath,
1439
+ attributes=attributes,
1440
+ )]
1441
+ except Exception as e:
1442
+ print("[!!query_tags]", e)
1443
+ return 0, []
1444
+
1445
+ is_fetching_spaces = False
1446
+ if (query.space_name
1447
+ and query.type == QueryType.spaces
1448
+ and query.space_name == "management"
1449
+ and query.subpath == "/"):
1450
+ is_fetching_spaces = True
1451
+ statement = select(Spaces) # type: ignore
1452
+ statement_total = select(func.count(col(Spaces.uuid)))
1453
+ else:
1454
+ statement = await set_sql_statement_from_query(table, statement, query, False)
1455
+ statement_total = await set_sql_statement_from_query(table, statement_total, query, True)
1456
+
1457
+ if query.type != QueryType.spaces:
1458
+ statement = apply_acl_and_query_policies(statement, table, user_shortname, user_query_policies)
1459
+ statement_total = apply_acl_and_query_policies(statement_total, table, user_shortname,
1460
+ user_query_policies)
1461
+
1462
+ try:
1463
+ if query.type == QueryType.aggregation and query.aggregation_data and bool(
1464
+ query.aggregation_data.group_by):
1465
+ statement_total = select(
1466
+ func.sum(statement_total.c["count"]).label('total_count')
1467
+ )
1468
+
1469
+ async with self.get_session() as session:
1470
+ if query.retrieve_total:
1471
+ _total = (await session.execute(statement_total)).one()
1472
+ total = int(_total[0])
1473
+ else:
1474
+ total = -1
1475
+ if query.type == QueryType.counters:
1476
+ return total, []
1477
+
1478
+ if query.type == QueryType.attachments_aggregation:
1479
+ # For aggregation, we need tuples
1480
+ results = list((await session.execute(statement)).all())
1481
+ await session.close()
1482
+ attributes = {}
1483
+ for item in results:
1484
+ attributes.update({item[0]: item[1]})
1485
+ return 1, [core.Record(
1486
+ resource_type=ResourceType.content,
1487
+ uuid=uuid4(),
1488
+ shortname='aggregation_result',
1489
+ subpath=query.subpath,
1490
+ attributes=attributes
1491
+ )]
1492
+ elif query.type == QueryType.aggregation:
1493
+ results = list((await session.execute(statement)).all())
1494
+ await session.close()
1495
+ else:
1496
+ # Non-aggregation: fetch ORM instances directly
1497
+ results = (await session.execute(statement)).scalars().all()
1498
+ await session.close()
1499
+
1500
+ if is_fetching_spaces:
1501
+ from utils.access_control import access_control
1502
+ results = [result for result in results if await access_control.check_space_access(
1503
+ user_shortname if user_shortname else "anonymous", result.shortname
1504
+ )]
1505
+ if len(results) == 0:
1506
+ return 0, []
1507
+
1508
+ results = await self._set_query_final_results(query, results)
1509
+
1510
+ if getattr(query, 'join', None):
1511
+ try:
1512
+ results = await self._apply_client_joins(results, query.join, user_shortname or "anonymous") # type: ignore
1513
+ except Exception as e:
1514
+ print("[!client_join]", e)
1515
+
1516
+ except Exception as e:
1517
+ print("[!!query]", e)
1518
+ raise api.Exception(
1519
+ status_code=status.HTTP_400_BAD_REQUEST,
1520
+ error=api.Error(
1521
+ type="query",
1522
+ code=InternalErrorCode.SOMETHING_WRONG,
1523
+ message=str(e),
1524
+ ),
1525
+ )
1526
+ return total, results
1527
+
1528
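+ # Client-side join: for each JoinQuery, the "join_on" expression pairs a field on the base
+ # records with a field on the joined query's records, e.g. "shortname:owner_shortname" or
+ # "payload.body.tags[]:shortname" (illustrative; the "[]" suffix marks an array-valued side).
+ # Matching joined records are attached under record.attributes["join"][alias].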
+ async def _apply_client_joins(self, base_records: list[core.Record], joins: list[api.JoinQuery], user_shortname: str) -> list[core.Record]:
1529
+ def parse_join_on(expr: str) -> list[tuple[str, bool, str, bool]]:
1530
+ joins_list = []
1531
+ for part in expr.split(','):
1532
+ part = part.strip()
1533
+ if not part:
1534
+ continue
1535
+ parts = [p.strip() for p in part.split(':', 1)]
1536
+ if len(parts) != 2:
1537
+ raise ValueError(f"Invalid join_on expression: {expr}")
1538
+ left, right = parts[0], parts[1]
1539
+ _l_arr = left.endswith('[]')
1540
+ _r_arr = right.endswith('[]')
1541
+ if _l_arr:
1542
+ left = left[:-2]
1543
+ if _r_arr:
1544
+ right = right[:-2]
1545
+ joins_list.append((left, _l_arr, right, _r_arr))
1546
+ return joins_list
1547
+
1548
+ def get_values_from_record(rec: core.Record, path: str, array_hint: bool) -> list:
1549
+ if path in ("shortname", "resource_type", "subpath", "uuid"):
1550
+ val = getattr(rec, path, None)
1551
+ elif path == "space_name":
1552
+ val = rec.attributes.get("space_name") if rec.attributes else None
1553
+ else:
1554
+ container = rec.attributes or {}
1555
+ val = get_nested_value(container, path)
1556
+
1557
+ if val is None:
1558
+ return []
1559
+ if isinstance(val, list):
1560
+ out = []
1561
+ for item in val:
1562
+ if isinstance(item, (str, int, float, bool)) or item is None:
1563
+ out.append(item)
1564
+ return out
1565
+
1566
+ # scalar value: wrap it in a single-element list (the array hint makes no difference here)
+ return [val]
1569
+
1570
+ for rec in base_records:
1571
+ if rec.attributes is None:
1572
+ rec.attributes = {}
1573
+ if rec.attributes.get('join') is None:
1574
+ rec.attributes['join'] = {}
1575
+
1576
+ import models.api as api
1577
+ for join_item in joins:
1578
+ join_on = getattr(join_item, 'join_on', None)
1579
+ alias = getattr(join_item, 'alias', None)
1580
+ q = getattr(join_item, 'query', None)
1581
+ if not join_on or not alias or q is None:
1582
+ continue
1583
+
1584
+ parsed_joins = parse_join_on(join_on)
1585
+ if not parsed_joins:
1586
+ continue
1587
+
1588
+ sub_query = q if isinstance(q, api.Query) else api.Query.model_validate(q)
1589
+ q_raw = q if isinstance(q, dict) else q.model_dump(exclude_defaults=True)
1590
+ user_limit = q_raw.get('limit') or q_raw.get('limit_')
1591
+ sub_query.limit = settings.max_query_limit
1592
+ sub_query = copy(sub_query)
1593
+
1594
+ search_terms = []
1595
+ possible_match = True
1596
+
1597
+ for l_path, l_arr, r_path, r_arr in parsed_joins:
1598
+ left_values = set()
1599
+ for br in base_records:
1600
+ l_vals = get_values_from_record(br, l_path, l_arr)
1601
+ for v in l_vals:
1602
+ if v is not None:
1603
+ left_values.add(str(v))
1604
+
1605
+ if not left_values:
1606
+ possible_match = False
1607
+ break
1608
+
1609
+ search_val = "|".join(left_values)
1610
+ search_terms.append(f"@{r_path}:{search_val}")
1611
+
1612
+ if not possible_match:
1613
+ right_records: list[core.Record] = []
1614
+ else:
1615
+ search_term = " ".join(search_terms)
1616
+ if sub_query.search:
1617
+ sub_query.search = f"{sub_query.search} {search_term}"
1618
+ else:
1619
+ sub_query.search = search_term
1620
+
1621
+ _total, right_records = await self.query(sub_query, user_shortname)
1622
+
1623
+ first_join = parsed_joins[0]
1624
+ l_path_0, l_arr_0, r_path_0, r_arr_0 = first_join
1625
+
1626
+ right_index: dict[str, list[core.Record]] = {}
1627
+ for rr in right_records:
1628
+ r_vals = get_values_from_record(rr, r_path_0, r_arr_0)
1629
+ for v in r_vals:
1630
+ if v is None:
1631
+ continue
1632
+ key = str(v)
1633
+ right_index.setdefault(key, []).append(rr)
1634
+
1635
+ for br in base_records:
1636
+ l_vals = get_values_from_record(br, l_path_0, l_arr_0)
1637
+ candidates: list[core.Record] = []
1638
+ for v in l_vals:
1639
+ if v is None:
1640
+ continue
1641
+ key = str(v)
1642
+ if key in right_index:
1643
+ candidates.extend(right_index[key])
1644
+
1645
+ seen = set()
1646
+ unique_candidates = []
1647
+ for c in candidates:
1648
+ uid = f"{c.subpath}:{c.shortname}:{c.resource_type}"
1649
+ if uid in seen:
1650
+ continue
1651
+ seen.add(uid)
1652
+ unique_candidates.append(c)
1653
+
1654
+ matched = []
1655
+ for cand in unique_candidates:
1656
+ all_match = True
1657
+ for i in range(1, len(parsed_joins)):
1658
+ l_p, l_a, r_p, r_a = parsed_joins[i]
1659
+ l_vs = set(str(x) for x in get_values_from_record(br, l_p, l_a) if x is not None)
1660
+ r_vs = set(str(x) for x in get_values_from_record(cand, r_p, r_a) if x is not None)
1661
+
1662
+ if not l_vs.intersection(r_vs):
1663
+ all_match = False
1664
+ break
1665
+
1666
+ if all_match:
1667
+ matched.append(cand)
1668
+
1669
+ if user_limit:
1670
+ matched = matched[:user_limit]
1671
+
1672
+ br.attributes['join'][alias] = matched
1673
+
1674
+ return base_records
1675
+
1676
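+ # Like load(), but returns None when the entry is missing; also normalises the stored payload
+ # dict into a core.Payload before validating the row into the requested Meta class.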
+ async def load_or_none(
1677
+ self,
1678
+ space_name: str,
1679
+ subpath: str,
1680
+ shortname: str,
1681
+ class_type: Type[MetaChild],
1682
+ user_shortname: str | None = None,
1683
+ schema_shortname: str | None = None,
1684
+ ) -> MetaChild | None:
1685
+
1686
+ result = await self.db_load_or_none(space_name, subpath, shortname, class_type, user_shortname,
1687
+ schema_shortname)
1688
+ if not result:
1689
+ return None
1690
+
1691
+ try:
1692
+ if hasattr(result, 'payload') and result.payload and isinstance(result.payload, dict):
1693
+ if result.payload.get("body", None) is None:
1694
+ result.payload["body"] = {}
1695
+ result.payload = core.Payload.model_validate(result.payload, strict=False)
1696
+ except Exception as e:
1697
+ print("[!load]", e)
1698
+ logger.error(f"Failed parsing an entry. Error: {e}")
1699
+ return class_type.model_validate(result.model_dump())
1700
+
1701
+ async def load(
1702
+ self,
1703
+ space_name: str,
1704
+ subpath: str,
1705
+ shortname: str,
1706
+ class_type: Type[MetaChild],
1707
+ user_shortname: str | None = None,
1708
+ schema_shortname: str | None = None,
1709
+ ) -> MetaChild:
1710
+ meta: MetaChild | None = await self.load_or_none(space_name, subpath, shortname, class_type, user_shortname,
1711
+ schema_shortname)
1712
+ if meta is None:
1713
+ raise api.Exception(
1714
+ status_code=status.HTTP_404_NOT_FOUND,
1715
+ error=api.Error(
1716
+ type="db",
1717
+ code=InternalErrorCode.OBJECT_NOT_FOUND,
1718
+ message=f"Request object is not available @{space_name}/{subpath}/{shortname} {class_type=} {schema_shortname=}",
1719
+ ),
1720
+ )
1721
+
1722
+ return meta
1723
+
1724
+ async def load_resource_payload(
1725
+ self,
1726
+ space_name: str,
1727
+ subpath: str,
1728
+ filename: str,
1729
+ class_type: Type[MetaChild],
1730
+ schema_shortname: str | None = None,
1731
+ ) -> dict[str, Any] | None:
1732
+ """Load a Meta class payload file"""
1733
+ async with self.get_session() as session:
1734
+ table = self.get_table(class_type)
1735
+ if not subpath.startswith("/"):
1736
+ subpath = f"/{subpath}"
1737
+ statement = select(table).where(table.space_name == space_name)
1738
+
1739
+ if table in [Roles, Permissions, Users]:
1740
+ statement = statement.where(table.shortname == filename.replace('.json', ''))
1741
+ elif table in [Entries, Attachments, Histories]:
1742
+ statement = statement.where(table.subpath == subpath).where(
1743
+ table.shortname == filename.replace('.json', '')
1744
+ )
1745
+
1746
+ result = (await session.execute(statement)).one_or_none()
1747
+ if result is None:
1748
+ return None
1749
+ result = result[0]
1750
+ var: dict = result.model_dump().get("payload", {}).get("body", {})
1751
+ return var
1752
+
1753
+ async def _validate_referential_integrity(self, meta: core.Meta):
1754
+ if isinstance(meta, core.User):
1755
+ if meta.roles:
1756
+ for role in meta.roles:
1757
+ if not await self.load_or_none(settings.management_space, 'roles', role, core.Role):
1758
+ raise api.Exception(
1759
+ status_code=status.HTTP_400_BAD_REQUEST,
1760
+ error=api.Error(
1761
+ type="validation",
1762
+ code=InternalErrorCode.SHORTNAME_DOES_NOT_EXIST,
1763
+ message=f"Role '{role}' does not exist",
1764
+ ),
1765
+ )
1766
+ if meta.groups:
1767
+ for group in meta.groups:
1768
+ if not await self.load_or_none(settings.management_space, 'groups', group, core.Group):
1769
+ raise api.Exception(
1770
+ status_code=status.HTTP_400_BAD_REQUEST,
1771
+ error=api.Error(
1772
+ type="validation",
1773
+ code=InternalErrorCode.SHORTNAME_DOES_NOT_EXIST,
1774
+ message=f"Group '{group}' does not exist",
1775
+ ),
1776
+ )
1777
+ elif isinstance(meta, core.Role):
1778
+ if meta.permissions:
1779
+ for permission in meta.permissions:
1780
+ if not await self.load_or_none(settings.management_space, 'permissions', permission, core.Permission):
1781
+ raise api.Exception(
1782
+ status_code=status.HTTP_400_BAD_REQUEST,
1783
+ error=api.Error(
1784
+ type="validation",
1785
+ code=InternalErrorCode.SHORTNAME_DOES_NOT_EXIST,
1786
+ message=f"Permission '{permission}' does not exist",
1787
+ ),
1788
+ )
1789
+ elif isinstance(meta, core.Group):
1790
+ if hasattr(meta, 'roles') and meta.roles:
1791
+ for role in meta.roles:
1792
+ if not await self.load_or_none(settings.management_space, 'roles', role, core.Role):
1793
+ raise api.Exception(
1794
+ status_code=status.HTTP_400_BAD_REQUEST,
1795
+ error=api.Error(
1796
+ type="validation",
1797
+ code=InternalErrorCode.SHORTNAME_DOES_NOT_EXIST,
1798
+ message=f"Role '{role}' does not exist",
1799
+ ),
1800
+ )
1801
+
1802
+ async def _check_in_use(self, meta: core.Meta):
1803
+ async with self.get_session() as session:
1804
+ if isinstance(meta, core.Role):
1805
+ statement = select(Users.shortname).where(col(Users.roles).contains([meta.shortname]))
1806
+ result = await session.execute(statement)
1807
+ if result.first():
1808
+ raise api.Exception(
1809
+ status_code=status.HTTP_400_BAD_REQUEST,
1810
+ error=api.Error(
1811
+ type="delete",
1812
+ code=InternalErrorCode.CANNT_DELETE,
1813
+ message=f"Role '{meta.shortname}' is in use by one or more users",
1814
+ ),
1815
+ )
1816
+ elif isinstance(meta, core.Group):
1817
+ statement = select(Users.shortname).where(col(Users.groups).contains([meta.shortname]))
1818
+ result = await session.execute(statement)
1819
+ if result.first():
1820
+ raise api.Exception(
1821
+ status_code=status.HTTP_400_BAD_REQUEST,
1822
+ error=api.Error(
1823
+ type="delete",
1824
+ code=InternalErrorCode.CANNT_DELETE,
1825
+ message=f"Group '{meta.shortname}' is in use by one or more users",
1826
+ ),
1827
+ )
1828
+ elif isinstance(meta, core.Permission):
1829
+ statement = select(Roles.shortname).where(col(Roles.permissions).contains([meta.shortname]))
1830
+ result = await session.execute(statement)
1831
+ if result.first():
1832
+ raise api.Exception(
1833
+ status_code=status.HTTP_400_BAD_REQUEST,
1834
+ error=api.Error(
1835
+ type="delete",
1836
+ code=InternalErrorCode.CANNT_DELETE,
1837
+ message=f"Permission '{meta.shortname}' is in use by one or more roles",
1838
+ ),
1839
+ )
1840
+
1841
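+ # Inserts a new row for the given Meta object: normalises the subpath, maps the Meta class to
+ # its SQLModel table, and (for non-attachment, non-history rows) precomputes the query_policies
+ # used for authorization filtering.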
+ async def save(
1842
+ self, space_name: str, subpath: str, meta: core.Meta
1843
+ ) -> Any:
1844
+ """Save"""
1845
+ await self._validate_referential_integrity(meta)
1846
+ try:
1847
+ async with self.get_session() as session:
1848
+ entity = {
1849
+ **meta.model_dump(),
1850
+ "space_name": space_name,
1851
+ "subpath": subpath,
1852
+ }
1853
+
1854
+ if meta.__class__ is core.Folder:
1855
+ if entity["subpath"] != "/":
1856
+ if not entity["subpath"].startswith("/"):
1857
+ entity["subpath"] = f'/{entity["subpath"]}'
1858
+ if entity["subpath"].endswith("/"):
1859
+ entity["subpath"] = entity["subpath"][:-1]
1860
+
1861
+ if "subpath" in entity:
1862
+ if entity["subpath"] != "/" and entity["subpath"].endswith("/"):
1863
+ entity["subpath"] = entity["subpath"][:-1]
1864
+ entity["subpath"] = subpath_checker(entity["subpath"])
1865
+
1866
+ entity['resource_type'] = meta.__class__.__name__.lower()
1867
+ data = self.get_base_model(meta.__class__, entity)
1868
+
1869
+ if not isinstance(data, Attachments) and not isinstance(data, Histories):
1870
+ data.query_policies = generate_query_policies(
1871
+ space_name=space_name,
1872
+ subpath=subpath,
1873
+ resource_type=entity['resource_type'],
1874
+ is_active=entity['is_active'],
1875
+ owner_shortname=entity.get('owner_shortname', 'dmart'),
1876
+ owner_group_shortname=entity.get('owner_group_shortname', None),
1877
+ )
1878
+ session.add(data)
1879
+ try:
1880
+ await session.commit()
1881
+ await session.refresh(data)
1882
+ except Exception as e:
1883
+ await session.rollback()
1884
+ raise e
1885
+ # Refresh authz MVs only when Users/Roles/Permissions changed
1886
+ # try:
1887
+ # if isinstance(data, (Users, Roles, Permissions)):
1888
+ # await self.ensure_authz_materialized_views_fresh()
1889
+ # except Exception as _e:
1890
+ # logger.warning(f"AuthZ MV refresh after save skipped: {_e}")
1891
+ return data
1892
+
1893
+ except Exception as e:
1894
+ print("[!save]", e)
1895
+ logger.error(f"Failed saving an entry. Error: {e}")
1896
+ raise api.Exception(
1897
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
1898
+ error=api.Error(
1899
+ type="db",
1900
+ code=InternalErrorCode.SOMETHING_WRONG,
1901
+ message=f"Failed saving an entry. Error: {e}",
1902
+ ),
1903
+ )
1904
+
1905
+ async def create(self, space_name: str, subpath: str, meta: core.Meta):
1906
+ result = await self.load_or_none(space_name, subpath, meta.shortname, meta.__class__)
1907
+
1908
+ if result is not None:
1909
+ raise api.Exception(
1910
+ status_code=status.HTTP_400_BAD_REQUEST,
1911
+ error=api.Error(
1912
+ type="create",
1913
+ code=InternalErrorCode.SHORTNAME_ALREADY_EXIST,
1914
+ message="already exists",
1915
+ ),
1916
+ )
1917
+
1918
+ await self.save(space_name, subpath, meta)
1919
+
1920
+ async def save_payload(
1921
+ self, space_name: str, subpath: str, meta: core.Meta, attachment
1922
+ ):
1923
+ if meta.__class__ != core.Content:
1924
+ media = await attachment.read()
1925
+ await self.update(
1926
+ space_name, subpath, meta,
1927
+ {}, {}, [],
1928
+ "", attachment_media=media
1929
+ )
1930
+ else:
1931
+ content = json.load(attachment.file)
1932
+ if meta.payload:
1933
+ meta.payload.body = content
1934
+ await self.update(
1935
+ space_name, subpath, meta,
1936
+ {}, {}, [],
1937
+ ""
1938
+ )
1939
+
1940
+ async def save_payload_from_json(
1941
+ self,
1942
+ space_name: str,
1943
+ subpath: str,
1944
+ meta: core.Meta,
1945
+ payload_data: dict[str, Any],
1946
+ ):
1947
+ try:
1948
+ result = await self.db_load_or_none(space_name, subpath, meta.shortname, meta.__class__)
1949
+ if result is None:
1950
+ raise api.Exception(
1951
+ status_code=status.HTTP_400_BAD_REQUEST,
1952
+ error=api.Error(
1953
+ type="create",
1954
+ code=InternalErrorCode.MISSING_METADATA,
1955
+ message="metadata is missing",
1956
+ ),
1957
+ )
1958
+ if meta.payload:
1959
+ if isinstance(meta.payload.body, dict):
1960
+ meta.payload.body = {
1961
+ **meta.payload.body,
1962
+ **payload_data,
1963
+ }
1964
+ else:
1965
+ meta.payload.body = payload_data
1966
+
1967
+ await self._validate_referential_integrity(meta)
1968
+ result.sqlmodel_update(meta.model_dump())
1969
+ async with self.get_session() as session:
1970
+ session.add(result)
1971
+ except Exception as e:
1972
+ print("[!save_payload_from_json]", e)
1973
+ logger.error(f"Failed parsing an entry. Error: {e}")
1974
+ raise api.Exception(
1975
+ status_code=status.HTTP_400_BAD_REQUEST,
1976
+ error=api.Error(
1977
+ type="update",
1978
+ code=InternalErrorCode.SOMETHING_WRONG,
1979
+ message="failed to update entry",
1980
+ ),
1981
+ )
1982
+
1983
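+ # Loads the existing row, applies the new Meta fields, regenerates query_policies, bumps
+ # updated_at (except for locks), then records the change via store_entry_diff and returns
+ # the resulting history diff.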
+ async def update(
1984
+ self,
1985
+ space_name: str,
1986
+ subpath: str,
1987
+ meta: core.Meta,
1988
+ old_version_flattend: dict,
1989
+ new_version_flattend: dict,
1990
+ updated_attributes_flattend: list,
1991
+ user_shortname: str,
1992
+ schema_shortname: str | None = None,
1993
+ retrieve_lock_status: bool | None = False,
1994
+ attachment_media: Any | None = None,
1995
+ ) -> dict:
1996
+ """Update the entry, store the difference and return it"""
1997
+ await self._validate_referential_integrity(meta)
1998
+
1999
+ try:
2000
+ result = await self.db_load_or_none(space_name, subpath, meta.shortname, meta.__class__)
2001
+ if result is None:
2002
+ raise api.Exception(
2003
+ status_code=status.HTTP_400_BAD_REQUEST,
2004
+ error=api.Error(
2005
+ type="create",
2006
+ code=InternalErrorCode.MISSING_METADATA,
2007
+ message="metadata is missing",
2008
+ ),
2009
+ )
2010
+
2011
+ if isinstance(result, Users) and not result.is_active and meta.is_active:
2012
+ await self.set_failed_password_attempt_count(result.shortname, 0)
2013
+
2014
+ result.sqlmodel_update(meta.model_dump())
2015
+
2016
+ if hasattr(result, "subpath") and (not result.subpath.startswith("/")):
2017
+ result.subpath = f"/{result.subpath}"
2018
+
2019
+ if isinstance(result, Attachments) and attachment_media:
2020
+ result.media = attachment_media
2021
+ if hasattr(result, 'query_policies'):
2022
+ result.query_policies = generate_query_policies(
2023
+ space_name=space_name,
2024
+ subpath=subpath,
2025
+ resource_type=result.resource_type, # type: ignore
2026
+ is_active=result.is_active, # type: ignore
2027
+ owner_shortname=result.owner_shortname,
2028
+ owner_group_shortname=getattr(result, 'owner_group_shortname', None),
2029
+ )
2030
+
2031
+ if meta.__class__ is not core.Lock or not isinstance(result, Locks):
2032
+ result.updated_at = datetime.now()
2033
+ new_version_flattend['updated_at'] = result.updated_at.isoformat() # type: ignore
2034
+ if "updated_at" not in updated_attributes_flattend:
2035
+ updated_attributes_flattend.append("updated_at")
2036
+ if 'updated_at' in old_version_flattend:
2037
+ old_version_flattend['updated_at'] = old_version_flattend['updated_at'].isoformat()
2038
+
2039
+ async with self.get_session() as session:
2040
+ session.add(result)
2041
+
2042
+ # try:
2043
+ # if isinstance(result, (Users, Roles, Permissions)):
2044
+ # await self.ensure_authz_materialized_views_fresh()
2045
+ # except Exception as _e:
2046
+ # logger.warning(f"AuthZ MV refresh after update skipped: {_e}")
2047
+ except Exception as e:
2048
+ print("[!update]", e)
2049
+ logger.error(f"Failed parsing an entry. Error: {e}")
2050
+ raise api.Exception(
2051
+ status_code=status.HTTP_400_BAD_REQUEST,
2052
+ error=api.Error(
2053
+ type="update",
2054
+ code=InternalErrorCode.SOMETHING_WRONG,
2055
+ message="failed to update entry",
2056
+ ),
2057
+ )
2058
+
2059
+ history_diff = await self.store_entry_diff(
2060
+ space_name,
2061
+ subpath,
2062
+ meta.shortname,
2063
+ user_shortname,
2064
+ old_version_flattend,
2065
+ new_version_flattend,
2066
+ updated_attributes_flattend,
2067
+ meta.__class__,
2068
+ )
2069
+ return history_diff
2070
+
2071
+ async def update_payload(
2072
+ self,
2073
+ space_name: str,
2074
+ subpath: str,
2075
+ meta: core.Meta,
2076
+ payload_data: dict[str, Any],
2077
+ owner_shortname: str,
2078
+ ):
2079
+ if not meta.payload:
2080
+ meta.payload = core.Payload()
2081
+ meta.payload.body = payload_data
2082
+ await self.update(
2083
+ space_name, subpath, meta, {}, {}, [], owner_shortname
2084
+ )
2085
+
2086
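+ # Computes a field-level diff between the old and new flattened versions, stores it as a
+ # Histories row together with a sha1 checksum of the new version, and stamps the same
+ # checksum onto the entry's last_checksum_history column.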
+ async def store_entry_diff(
2087
+ self,
2088
+ space_name: str,
2089
+ subpath: str,
2090
+ shortname: str,
2091
+ owner_shortname: str,
2092
+ old_version_flattend: dict,
2093
+ new_version_flattend: dict,
2094
+ updated_attributes_flattend: list,
2095
+ resource_type,
2096
+ ) -> dict:
2097
+ try:
2098
+ diff_keys = list(old_version_flattend.keys())
2099
+ diff_keys.extend(list(new_version_flattend.keys()))
2100
+ history_diff = {}
2101
+ for key in set(diff_keys):
2102
+ # if key in updated_attributes_flattend:
2103
+ old = copy(old_version_flattend.get(key, "null"))
2104
+ new = copy(new_version_flattend.get(key, "null"))
2105
+
2106
+ if old != new:
2107
+ if isinstance(old, list) and isinstance(new, list):
2108
+ old, new = arr_remove_common(old, new)
2109
+
2110
+ history_diff[key] = {"old": old, "new": new}
2111
+ removed = get_removed_items(list(old_version_flattend.keys()),
2112
+ list(new_version_flattend.keys()))
2113
+ for r in removed:
2114
+ history_diff[r] = {
2115
+ "old": old_version_flattend[r],
2116
+ "new": None,
2117
+ }
2118
+ if not history_diff:
2119
+ return {}
2120
+
2121
+ new_version_json = json.dumps(new_version_flattend, sort_keys=True, default=str)
2122
+ new_checksum = hashlib.sha1(new_version_json.encode()).hexdigest()
2123
+
2124
+ history_obj = Histories(
2125
+ space_name=space_name,
2126
+ uuid=uuid4(),
2127
+ shortname=shortname,
2128
+ owner_shortname=owner_shortname or "__system__",
2129
+ timestamp=datetime.now(),
2130
+ request_headers=get_request_data().get("request_headers", {}),
2131
+ diff=history_diff,
2132
+ subpath=subpath,
2133
+ last_checksum_history=new_checksum,
2134
+ )
2135
+
2136
+ async with self.get_session() as session:
2137
+ session.add(Histories.model_validate(history_obj))
2138
+ table = self.get_table(resource_type)
2139
+ await session.execute(
2140
+ update(table).where(
2141
+ col(table.space_name) == space_name,
2142
+ col(table.subpath) == subpath,
2143
+ col(table.shortname) == shortname
2144
+ ).values(last_checksum_history=new_checksum)
2145
+ )
2146
+
2147
+ return history_diff
2148
+ except Exception as e:
2149
+ print("[!store_entry_diff]", e)
2150
+ logger.error(f"Failed parsing an entry. Error: {e}")
2151
+ return {}
2152
+
2153
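+ # Renames/relocates an entry (shortname, subpath and/or space). Moving a whole space also
+ # rewrites space_name on its Entries and Attachments rows; Locks cannot be moved, and a
+ # collision with an existing destination entry is rejected.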
+ async def move(
2154
+ self,
2155
+ src_space_name: str,
2156
+ src_subpath: str,
2157
+ src_shortname: str,
2158
+ dest_space_name: str,
2159
+ dest_subpath: str,
2160
+ dest_shortname: str,
2161
+ meta: core.Meta,
2162
+ ):
2163
+ """Move the file that match the criteria given, remove source folder if empty"""
2164
+ if not src_subpath.startswith("/"):
2165
+ src_subpath = f"/{src_subpath}"
2166
+ if dest_subpath and not dest_subpath.startswith("/"):
2167
+ dest_subpath = f"/{dest_subpath}"
2168
+
2169
+ origin = await self.db_load_or_none(src_space_name, src_subpath, src_shortname, meta.__class__)
2170
+ if isinstance(origin, Locks):
2171
+ raise api.Exception(
2172
+ status_code=status.HTTP_400_BAD_REQUEST,
2173
+ error=api.Error(
2174
+ type="move",
2175
+ code=InternalErrorCode.NOT_ALLOWED,
2176
+ message="Locks cannot be moved",
2177
+ ),
2178
+ )
2179
+ if origin is None:
2180
+ raise api.Exception(
2181
+ status_code=status.HTTP_400_BAD_REQUEST,
2182
+ error=api.Error(
2183
+ type="move",
2184
+ code=InternalErrorCode.SHORTNAME_DOES_NOT_EXIST,
2185
+ message="Entry does not exist",
2186
+ ),
2187
+ )
2188
+
2189
+ async with self.get_session() as session:
2190
+ old_shortname = ""
2191
+ old_subpath = ""
2192
+ try:
2193
+ old_shortname = origin.shortname
2194
+ if hasattr(origin, 'subpath'):
2195
+ old_subpath = origin.subpath
2196
+ table = self.get_table(meta.__class__)
2197
+ statement = select(table).where(table.space_name == dest_space_name)
2198
+
2199
+ if table in [Roles, Permissions, Users, Spaces]:
2200
+ statement = statement.where(table.shortname == dest_shortname)
2201
+ else:
2202
+ statement = statement.where(table.subpath == dest_subpath).where(
2203
+ table.shortname == dest_shortname
2204
+ )
2205
+
2206
+ target = (await session.execute(statement)).one_or_none()
2207
+ if target is not None:
2208
+ raise api.Exception(
2209
+ status_code=status.HTTP_400_BAD_REQUEST,
2210
+ error=api.Error(
2211
+ type="move",
2212
+ code=InternalErrorCode.SHORTNAME_ALREADY_EXIST,
2213
+ message="already exists",
2214
+ ),
2215
+ )
2216
+ if dest_shortname:
2217
+ origin.shortname = dest_shortname
2218
+
2219
+ if hasattr(origin, 'subpath') and dest_subpath:
2220
+ origin.subpath = dest_subpath
2221
+
2222
+ if hasattr(origin, 'space_name') and dest_space_name:
2223
+ origin.space_name = dest_space_name
2224
+
2225
+ origin.query_policies = generate_query_policies(
2226
+ space_name=dest_space_name,
2227
+ subpath=dest_subpath,
2228
+ resource_type=(origin.resource_type if hasattr(origin, 'resource_type')
+ else origin.__class__.__name__.lower()[:-1]),
2231
+ is_active=origin.is_active if hasattr(origin, 'is_active') else True,
2232
+ owner_shortname=origin.owner_shortname,
2233
+ owner_group_shortname=None,
2234
+ )
2235
+
2236
+ session.add(origin)
2237
+ try:
2238
+ if table is Spaces:
2239
+ await session.execute(
2240
+ update(Spaces)
2241
+ .where(col(Spaces.space_name) == src_space_name)
2242
+ .values(space_name=dest_shortname, shortname=dest_shortname)
2243
+ )
2244
+ await session.execute(
2245
+ update(Entries)
2246
+ .where(col(Entries.space_name) == src_space_name)
2247
+ .values(space_name=dest_shortname)
2248
+ )
2249
+ await session.execute(
2250
+ update(Attachments)
2251
+ .where(col(Attachments.space_name) == src_space_name)
2252
+ .values(space_name=dest_shortname)
2253
+ )
2254
+ except Exception as e:
2255
+ origin.shortname = old_shortname
2256
+ if hasattr(origin, 'subpath'):
2257
+ origin.subpath = old_subpath
2258
+
2259
+ session.add(origin)
2260
+
2261
+ print("[!move]", e)
2262
+ logger.error(f"Failed parsing an entry. Error: {e}")
2263
+ raise api.Exception(
2264
+ status_code=status.HTTP_400_BAD_REQUEST,
2265
+ error=api.Error(
2266
+ type="move",
2267
+ code=InternalErrorCode.SOMETHING_WRONG,
2268
+ message="failed to move entry",
2269
+ ),
2270
+ )
2271
+ except Exception as e:
2272
+ print("[!move]", e)
2273
+ logger.error(f"Failed parsing an entry. Error: {e}")
2274
+ raise api.Exception(
2275
+ status_code=status.HTTP_400_BAD_REQUEST,
2276
+ error=api.Error(
2277
+ type="move",
2278
+ code=InternalErrorCode.SOMETHING_WRONG,
2279
+ message="failed to move entry",
2280
+ ),
2281
+ )
2282
+
2283
+ def delete_empty(self, path: Path):
2284
+ pass
2285
+
2286
+ async def clone(
2287
+ self,
2288
+ src_space: str,
2289
+ dest_space: str,
2290
+ src_subpath: str,
2291
+ src_shortname: str,
2292
+ dest_subpath: str,
2293
+ dest_shortname: str,
2294
+ class_type: Type[MetaChild],
2295
+ ):
2296
+ pass
2297
+
2298
+ async def is_entry_exist(self,
2299
+ space_name: str,
2300
+ subpath: str,
2301
+ shortname: str,
2302
+ resource_type: ResourceType,
2303
+ schema_shortname: str | None = None, ) -> bool:
2304
+ async with self.get_session() as session:
2305
+ resource_cls = getattr(
2306
+ sys.modules["models.core"], camel_case(resource_type)
2307
+ )
2308
+
2309
+ table = self.get_table(resource_cls)
2310
+ if not subpath.startswith("/"):
2311
+ subpath = f"/{subpath}"
2312
+
2313
+ statement = select(table).where(table.space_name == space_name)
2314
+
2315
+ if table in [Roles, Permissions, Users]:
2316
+ statement = statement.where(table.shortname == shortname)
2317
+ elif resource_cls in [
2318
+ core.Alteration,
2319
+ core.Media,
2320
+ core.Lock,
2321
+ core.Comment,
2322
+ core.Reply,
2323
+ core.Reaction,
2324
+ core.Json,
2325
+ core.DataAsset,
2326
+ ]:
2327
+ statement = statement.where(table.subpath == subpath).where(
2328
+ table.shortname == shortname
2329
+ )
2330
+
2331
+ else:
2332
+ statement = statement.where(table.subpath == subpath).where(
2333
+ table.shortname == shortname
2334
+ )
2335
+
2336
+ result = (await session.execute(statement)).fetchall()
2337
+ result = [result[0] for result in result]
2338
+ return len(result) > 0
2339
+
2340
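+ # Deletes an entry after checking it is not referenced (role/group/permission in use).
+ # Deleting a user reassigns its owned rows to "anonymous"; deleting a space or folder also
+ # removes the contained Entries and Attachments; deleting an entry removes its attachments.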
+ async def delete(
2341
+ self,
2342
+ space_name: str,
2343
+ subpath: str,
2344
+ meta: core.Meta,
2345
+ user_shortname: str,
2346
+ schema_shortname: str | None = None,
2347
+ retrieve_lock_status: bool | None = False,
2348
+ ):
2349
+ """Delete the file that match the criteria given, remove folder if empty"""
2350
+ await self._check_in_use(meta)
2351
+ async with self.get_session() as session:
2352
+ try:
2353
+ if not subpath.startswith("/"):
2354
+ subpath = f"/{subpath}"
2355
+
2356
+ result = await self.db_load_or_none(space_name, subpath, meta.shortname, meta.__class__)
2357
+
2358
+ if meta.__class__ == core.User:
2359
+ try:
2360
+ await session.execute(update(Spaces).where(col(Spaces.owner_shortname) == meta.shortname).values(owner_shortname="anonymous"))
2361
+ await session.execute(update(Entries).where(col(Entries.owner_shortname) == meta.shortname).values(owner_shortname="anonymous"))
2362
+ await session.execute(update(Attachments).where(col(Attachments.owner_shortname) == meta.shortname).values(owner_shortname="anonymous"))
2363
+ await session.execute(update(Roles).where(col(Roles.owner_shortname) == meta.shortname).values(owner_shortname="anonymous"))
2364
+ await session.execute(update(Permissions).where(col(Permissions.owner_shortname) == meta.shortname).values(owner_shortname="anonymous"))
2365
+
2366
+ await session.execute(update(Locks).where(col(Locks.owner_shortname) == meta.shortname).values(owner_shortname="anonymous"))
2367
+ await session.execute(update(Histories).where(col(Histories.owner_shortname) == meta.shortname).values(owner_shortname="anonymous"))
2368
+
2369
+ await session.execute(delete(Sessions).where(col(Sessions.shortname) == meta.shortname))
2370
+ except Exception as _e:
2371
+ logger.warning(f"Failed to reassign ownership to anonymous for user {meta.shortname}: {_e}")
2372
+
2373
+ await session.delete(result)
2374
+ if meta.__class__ == core.Space:
2375
+ statement2 = delete(Attachments).where(col(Attachments.space_name) == space_name)
2376
+ await session.execute(statement2)
2377
+ statement = delete(Entries).where(col(Entries.space_name) == space_name)
2378
+ await session.execute(statement)
2379
+ if meta.__class__ == core.Folder:
2380
+ _subpath = f"{subpath}/{meta.shortname}".replace('//', '/')
2381
+ statement2 = delete(Attachments) \
2382
+ .where(col(Attachments.space_name) == space_name) \
2383
+ .where(col(Attachments.subpath).startswith(_subpath))
2384
+ await session.execute(statement2)
2385
+ statement = delete(Entries) \
2386
+ .where(col(Entries.space_name) == space_name) \
2387
+ .where(col(Entries.subpath).startswith(_subpath))
2388
+ await session.execute(statement)
2389
+ elif isinstance(result, Entries):
2390
+ entry_attachment_subpath = f"{subpath}/{meta.shortname}".replace('//', '/')
2391
+ statement = delete(Attachments) \
2392
+ .where(col(Attachments.space_name) == space_name) \
2393
+ .where(col(Attachments.subpath).startswith(entry_attachment_subpath))
2394
+ await session.execute(statement)
2395
+
2396
+ # Refresh authz MVs only when Users/Roles/Permissions changed
2397
+ # try:
2398
+ # if meta.__class__ in (core.User, core.Role, core.Permission):
2399
+ # await self.ensure_authz_materialized_views_fresh()
2400
+ # except Exception as _e:
2401
+ # logger.warning(f"AuthZ MV refresh after delete skipped: {_e}")
2402
+ except Exception as e:
2403
+ print("[!delete]", e)
2404
+ logger.error(f"Failed parsing an entry. Error: {e}")
2405
+ raise api.Exception(
2406
+ status_code=status.HTTP_400_BAD_REQUEST,
2407
+ error=api.Error(
2408
+ type="delete",
2409
+ code=InternalErrorCode.SOMETHING_WRONG,
2410
+ message="failed to delete entry",
2411
+ ),
2412
+ )
2413
+
2414
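+ # Lock bookkeeping: "lock" inserts a Locks row (failing if one already exists), "fetch"
+ # returns the current lock payload, and "unlock" deletes the matching Locks rows.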
+ async def lock_handler(self, space_name: str, subpath: str, shortname: str, user_shortname: str,
2415
+ action: LockAction) -> dict | None:
2416
+ if not subpath.startswith("/"):
2417
+ subpath = f"/{subpath}"
2418
+
2419
+ async with self.get_session() as session:
2420
+ match action:
2421
+ case LockAction.lock:
2422
+ statement = select(Locks).where(Locks.space_name == space_name) \
2423
+ .where(Locks.subpath == subpath) \
2424
+ .where(Locks.shortname == shortname)
2425
+ result = (await session.execute(statement)).one_or_none()
2426
+ if result:
2427
+ raise api.Exception(
2428
+ status_code=status.HTTP_400_BAD_REQUEST,
2429
+ error=api.Error(
2430
+ type="lock",
2431
+ code=InternalErrorCode.LOCKED_ENTRY,
2432
+ message="entry already locked already exists!",
2433
+ )
2434
+ )
2435
+
2436
+ lock = Locks(
2437
+ uuid=uuid4(),
2438
+ space_name=space_name,
2439
+ subpath=subpath,
2440
+ shortname=shortname,
2441
+ owner_shortname=user_shortname,
2442
+ )
2443
+ session.add(lock)
2444
+ await session.commit()
2445
+ await session.refresh(lock)
2446
+ return lock.model_dump()
2447
+ case LockAction.fetch:
2448
+ lock_payload = (await self.load(
2449
+ space_name=space_name,
2450
+ subpath=subpath,
2451
+ shortname=shortname,
2452
+ class_type=core.Lock,
2453
+ user_shortname=user_shortname,
2454
+ )).model_dump()
2455
+ return lock_payload
2456
+ case LockAction.unlock:
2457
+ statement2 = delete(Locks) \
2458
+ .where(col(Locks.space_name) == space_name) \
2459
+ .where(col(Locks.subpath) == subpath) \
2460
+ .where(col(Locks.shortname) == shortname)
2461
+ await session.execute(statement2)
2462
+ await session.commit()
2463
+ return None
2464
+
2465
+ async def fetch_space(self, space_name: str) -> core.Space | None:
2466
+ try:
2467
+ return await self.load(space_name, "/", space_name, core.Space)
2468
+ except Exception as e:
2469
+ print("[!fetch_space]", e)
2470
+ return None
2471
+
2472
+ async def set_user_session(self, user_shortname: str, token: str) -> bool:
2473
+ try:
2474
+ total, last_session = await self.get_user_session(user_shortname, token)
2475
+
2476
+ if (settings.max_sessions_per_user == 1 and last_session is not None) \
2477
+ or (settings.max_sessions_per_user != 0 and total >= settings.max_sessions_per_user):
2478
+ await self.remove_user_session(user_shortname)
2479
+
2480
+ timestamp = datetime.now()
2481
+ async with self.get_session() as session:
2482
+ session.add(
2483
+ Sessions(
2484
+ uuid=uuid4(),
2485
+ shortname=user_shortname,
2486
+ token=hash_password(token),
2487
+ timestamp=timestamp,
2488
+ )
2489
+ )
2490
+
2491
+ return True
2492
+ except Exception as e:
2493
+ print("[!set_sql_user_session]", e)
2494
+ return False
2495
+
2496
+ async def get_user_session(self, user_shortname: str, token: str) -> Tuple[int, str | None]:
2497
+ async with self.get_session() as session:
2498
+ statement = select(Sessions) \
2499
+ .where(col(Sessions.shortname) == user_shortname)
2500
+
2501
+ results = (await session.execute(statement)).all()
2502
+ results = [result[0] for result in results]
2503
+
2504
+ if len(results) == 0:
2505
+ return 0, None
2506
+
2507
+ for r in results:
2508
+ if settings.session_inactivity_ttl + r.timestamp.timestamp() < time.time():
2509
+ await session.execute(delete(Sessions).where(col(Sessions.uuid) == r.uuid))
2510
+ continue
2511
+ if verify_password(token, r.token):
2512
+ r.timestamp = datetime.now()
2513
+ session.add(r)
2514
+ await session.commit()
2515
+ return len(results), token
2516
+ # else:
2517
+ # await session.execute(delete(Sessions).where(col(Sessions.uuid) == r.uuid))
2518
+ return len(results), None
2519
+
2520
+ async def remove_user_session(self, user_shortname: str) -> bool:
2521
+ async with self.get_session() as session:
2522
+ try:
2523
+ statement = select(Sessions).where(col(Sessions.shortname) == user_shortname).order_by(
2524
+ col(Sessions.timestamp).desc()
2525
+ ).offset(settings.max_sessions_per_user - 1)
2526
+ oldest_sessions = (await session.execute(statement)).all()
2527
+ oldest_sessions = [oldest_session[0] for oldest_session in oldest_sessions]
2528
+ for oldest_session in oldest_sessions:
2529
+ await session.delete(oldest_session)
2530
+ await session.commit()
2531
+ return True
2532
+ except Exception as e:
2533
+ print("[!remove_sql_user_session]", e)
2534
+ return False
2535
+
2536
+ async def set_invitation(self, invitation_token: str, invitation_value):
2537
+ async with self.get_session() as session:
2538
+ timestamp = datetime.now()
2539
+ try:
2540
+ session.add(
2541
+ Invitations(
2542
+ uuid=uuid4(),
2543
+ invitation_token=invitation_token,
2544
+ invitation_value=invitation_value,
2545
+ timestamp=timestamp,
2546
+ )
2547
+ )
2548
+ except Exception as e:
2549
+ print("[!set_invitation]", e)
2550
+
2551
+ async def get_invitation(self, invitation_token: str) -> str | None:
2552
+ async with self.get_session() as session:
2553
+ statement = select(Invitations).where(col(Invitations.invitation_token) == invitation_token)
2554
+
2555
+ result = (await session.execute(statement)).one_or_none()
2556
+ if result is None:
2557
+ return None
2558
+ result = result[0]
2559
+ user_session = Invitations.model_validate(result)
2560
+
2561
+ return user_session.invitation_value
2562
+
2563
+ async def delete_invitation(self, invitation_token: str) -> bool:
2564
+ async with self.get_session() as session:
2565
+ try:
2566
+ statement = delete(Invitations).where(col(Invitations.invitation_token) == invitation_token)
2567
+ await session.execute(statement)
2568
+ return True
2569
+ except Exception as e:
2570
+ print("[!remove_sql_user_session]", e)
2571
+ return False
2572
+
2573
+ async def set_url_shortner(self, token_uuid: str, url: str):
2574
+ async with self.get_session() as session:
2575
+ try:
2576
+ session.add(
2577
+ URLShorts(
2578
+ uuid=uuid4(),
2579
+ token_uuid=token_uuid,
2580
+ url=url,
2581
+ timestamp=datetime.now(),
2582
+ )
2583
+ )
2584
+ except Exception as e:
2585
+ print("[!set_url_shortner]", e)
2586
+
2587
+ async def get_url_shortner(self, token_uuid: str) -> str | None:
2588
+ async with self.get_session() as session:
2589
+ statement = select(URLShorts).where(URLShorts.token_uuid == token_uuid)
2590
+
2591
+ result = (await session.execute(statement)).one_or_none()
2592
+ if result is None:
2593
+ return None
2594
+ result = result[0]
2595
+ url_shortner = URLShorts.model_validate(result)
2596
+ if settings.url_shorter_expires + url_shortner.timestamp.timestamp() < time.time():
2597
+ await self.delete_url_shortner(token_uuid)
2598
+ return None
2599
+
2600
+ return url_shortner.url
2601
+
2602
+ async def delete_url_shortner(self, token_uuid: str) -> bool:
2603
+ async with self.get_session() as session:
2604
+ try:
2605
+ statement = delete(URLShorts).where(col(URLShorts.token_uuid) == token_uuid)
2606
+ await session.execute(statement)
2607
+ return True
2608
+ except Exception as e:
2609
+ print("[!remove_sql_user_session]", e)
2610
+ return False
2611
+
2612
+ async def delete_url_shortner_by_token(self, invitation_token: str) -> bool:
2613
+ async with self.get_session() as session:
2614
+ try:
2615
+ statement = delete(URLShorts).where(col(URLShorts.url).ilike(f"%{invitation_token}%"))
2616
+ await session.execute(statement)
2617
+ return True
2618
+ except Exception as e:
2619
+ print("[!delete_url_shortner_by_token]", e)
2620
+ return False
2621
+
2622
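+ # Post-processes raw query rows into core.Record objects: attachment and aggregation queries
+ # are converted directly, while standard queries optionally strip payload bodies and fetch
+ # each record's attachments concurrently via asyncio.gather.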
+ async def _set_query_final_results(self, query, results):
2623
+ is_aggregation = query.type == QueryType.aggregation
2624
+ is_attachment_query = query.type == QueryType.attachments
2625
+ process_payload = query.type not in [QueryType.history, QueryType.events]
2626
+
2627
+ # Case 1: Attachment query → Direct conversion of all items
2628
+ if is_attachment_query:
2629
+ return [
2630
+ item.to_record(item.subpath, item.shortname)
2631
+ for item in results
2632
+ ]
2633
+
2634
+ # Case 2: Aggregation query → delegate to existing aggregator
2635
+ if is_aggregation:
2636
+ for idx, item in enumerate(results):
2637
+ results = set_results_from_aggregation(query, item, results, idx)
2638
+ return results
2639
+
2640
+ # Case 3: Standard query → convert and optionally fetch attachments
2641
+ attachment_tasks = []
2642
+ attachment_indices = []
2643
+
2644
+ for idx, item in enumerate(results):
2645
+ rec = item.to_record(item.subpath, item.shortname)
2646
+ results[idx] = rec
2647
+
2648
+ if process_payload:
2649
+ # Strip payload body early (if disabled)
2650
+ if not query.retrieve_json_payload:
2651
+ payload = rec.attributes.get("payload", {})
2652
+ if payload and payload.get("body"):
2653
+ payload["body"] = None
2654
+
2655
+ # Queue attachments if requested
2656
+ if query.retrieve_attachments:
2657
+ attachment_tasks.append(
2658
+ self.get_entry_attachments(
2659
+ rec.subpath,
2660
+ Path(f"{query.space_name}/{rec.shortname}"),
2661
+ retrieve_json_payload=True,
2662
+ )
2663
+ )
2664
+ attachment_indices.append(idx)
2665
+
2666
+ # Run all attachment retrievals concurrently
2667
+ if attachment_tasks:
2668
+ attachments_list = await asyncio.gather(*attachment_tasks)
2669
+ for idx, attachments in zip(attachment_indices, attachments_list):
2670
+ results[idx].attachments = attachments
2671
+
2672
+ return results
2673
+
2674
+ async def clear_failed_password_attempts(self, user_shortname: str) -> bool:
2675
+ async with self.get_session() as session:
2676
+ try:
2677
+ statement = select(Users).where(Users.shortname == user_shortname)
2678
+ result = (await session.execute(statement)).one_or_none()
2679
+ if result is None:
2680
+ return False
2681
+ result = result[0]
2682
+ result.attempt_count = 0
2683
+ session.add(result)
2684
+ return True
2685
+ except Exception as e:
2686
+ print("[!clear_failed_password_attempts]", e)
2687
+ return False
2688
+
2689
+ async def get_failed_password_attempt_count(self, user_shortname: str) -> int:
2690
+ async with self.get_session() as session:
2691
+ statement = select(Users).where(col(Users.shortname) == user_shortname)
2692
+
2693
+ result = (await session.execute(statement)).one_or_none()
2694
+ if result is None:
2695
+ return 0
2696
+ result = result[0]
2697
+ failed_login_attempt = Users.model_validate(result)
2698
+ return 0 if failed_login_attempt.attempt_count is None else failed_login_attempt.attempt_count
2699
+
2700
+ async def set_failed_password_attempt_count(self, user_shortname: str, attempt_count: int) -> bool:
2701
+ async with self.get_session() as session:
2702
+ try:
2703
+ statement = select(Users).where(col(Users.shortname) == user_shortname)
2704
+ result = (await session.execute(statement)).one_or_none()
2705
+ if result is None:
2706
+ return False
2707
+ result = result[0]
2708
+ result.attempt_count = attempt_count
2709
+ session.add(result)
2710
+ return True
2711
+ except Exception as e:
2712
+ print("[!set_failed_password_attempt_count]", e)
2713
+ return False
2714
+
2715
+ async def get_spaces(self) -> dict:
2716
+ async with self.get_session() as session:
2717
+ statement = select(Spaces)
2718
+ results = (await session.execute(statement)).all()
2719
+ results = [result[0] for result in results]
2720
+ spaces = {}
2721
+ for idx, item in enumerate(results):
2722
+ space = Spaces.model_validate(item)
2723
+ spaces[space.shortname] = space.model_dump()
2724
+ return spaces
2725
+
2726
+ async def get_media_attachment(self, space_name: str, subpath: str, shortname: str) -> io.BytesIO | None:
2727
+ if not subpath.startswith("/"):
2728
+ subpath = f"/{subpath}"
2729
+
2730
+ async with self.get_session() as session:
2731
+ statement = select(Attachments.media) \
2732
+ .where(Attachments.space_name == space_name) \
2733
+ .where(Attachments.subpath == subpath) \
2734
+ .where(Attachments.shortname == shortname)
2735
+
2736
+ result = (await session.execute(statement)).one_or_none()
2737
+ if result:
2738
+ result = result[0]
2739
+ return io.BytesIO(result)
2740
+ return None
2741
+
2742
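+ # Enforces per-folder composite uniqueness. The folder's payload may declare e.g.
+ # "unique_fields": [["email"], ["msisdn", "document_id"]] (illustrative); each inner list is
+ # treated as a composite key and a search query is issued to make sure no other entry
+ # shares the same values.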
+ async def validate_uniqueness(
2743
+ self, space_name: str, record: core.Record, action: str = api.RequestType.create, user_shortname=None
2744
+ ) -> bool:
2745
+ """
2746
+ Read the unique-field groups from the entry's folder metadata and
+ ensure that each group of values is unique across all entries.
2748
+ """
2749
+ parent_subpath, folder_shortname = os.path.split(record.subpath)
2750
+ folder_meta = None
2751
+ try:
2752
+ folder_meta = await self.load(space_name, parent_subpath, folder_shortname, core.Folder)
2753
+ except Exception:
2754
+ folder_meta = None
2755
+
2756
+ if (folder_meta is None or folder_meta.payload is None
+ or not isinstance(folder_meta.payload.body, dict)
+ or not isinstance(folder_meta.payload.body.get("unique_fields", None), list)):  # type: ignore
2759
+ return True
2760
+
2761
+ current_user = None
2762
+ if action is api.RequestType.update and record.resource_type is ResourceType.user:
2763
+ current_user = await self.load(space_name, record.subpath, record.shortname, core.User)
2764
+
2765
+ for compound in folder_meta.payload.body["unique_fields"]: # type: ignore
2766
+ query_string = ""
2767
+ for composite_unique_key in compound:
2768
+ value = get_nested_value(record.attributes, composite_unique_key)
2769
+ if value is None or value == "":
2770
+ continue
2771
+ if current_user is not None and hasattr(current_user, composite_unique_key) \
2772
+ and getattr(current_user, composite_unique_key) == value:
2773
+ continue
2774
+
2775
+ query_string += f"@{composite_unique_key}:{value} "
2776
+
2777
+ if query_string == "":
2778
+ continue
2779
+
2780
+ q = api.Query(
2781
+ space_name=space_name,
2782
+ subpath=record.subpath,
2783
+ type=QueryType.subpath,
2784
+ search=query_string
2785
+ )
2786
+ owner = record.attributes.get("owner_shortname", None) if user_shortname is None else user_shortname
2787
+ total, _ = await self.query(q, owner)
2788
+
2789
+ if total != 0:
2790
+ raise API_Exception(
2791
+ status.HTTP_400_BAD_REQUEST,
2792
+ API_Error(
2793
+ type="request",
2794
+ code=InternalErrorCode.DATA_SHOULD_BE_UNIQUE,
2795
+ message=f"Entry properties should be unique: {query_string}",
2796
+ ),
2797
+ )
2798
+ return True
2799
+
2800
+ async def validate_payload_with_schema(
+     self,
+     payload_data: UploadFile | dict,
+     space_name: str,
+     schema_shortname: str,
+ ):
+     if not isinstance(payload_data, (dict, UploadFile)):
+         raise API_Exception(
+             status.HTTP_400_BAD_REQUEST,
+             API_Error(
+                 type="request",
+                 code=InternalErrorCode.INVALID_DATA,
+                 message="Invalid payload.body",
+             ),
+         )
+
+     if schema_shortname in ["folder_rendering", "meta_schema"]:
+         space_name = "management"
+     schema = await self.load(space_name, "/schema", schema_shortname, core.Schema)
+     if schema.payload:
+         schema = schema.payload.model_dump()['body']
+
+     if not isinstance(payload_data, dict):
+         data = json.load(payload_data.file)
+         payload_data.file.seek(0)
+     else:
+         data = payload_data
+
+     Draft7Validator(schema).validate(data)  # type: ignore
+
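The final call delegates to `jsonschema`'s `Draft7Validator`, which raises `ValidationError` on the first violation and returns nothing on success. A self-contained sketch of that behaviour (schema and data are made up):

    from jsonschema import Draft7Validator
    from jsonschema.exceptions import ValidationError

    schema = {
        "type": "object",
        "properties": {"name": {"type": "string"}, "age": {"type": "integer"}},
        "required": ["name"],
    }

    Draft7Validator(schema).validate({"name": "dmart", "age": 3})  # passes silently

    try:
        Draft7Validator(schema).validate({"age": "three"})  # missing "name", wrong type
    except ValidationError as e:
        print(e.message)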
+ async def get_schema(self, space_name: str, schema_shortname: str, owner_shortname: str) -> dict:
+     schema_content = await self.load(
+         space_name=space_name,
+         subpath="/schema",
+         shortname=schema_shortname,
+         class_type=core.Schema,
+         user_shortname=owner_shortname,
+     )
+
+     if schema_content and schema_content.payload and isinstance(schema_content.payload.body, dict):
+         return resolve_schema_references(schema_content.payload.body)
+
+     return {}
+
+ async def check_uniqueness(self, unique_fields, search_str, redis_escape_chars) -> dict:
+     for key, value in unique_fields.items():
+         if value is None:
+             continue
+         if key == "email_unescaped":
+             key = "email"
+
+         result = await self.get_entry_by_criteria({key: value}, Users)
+
+         if result is not None:
+             return {"unique": False, "field": key}
+
+     return {"unique": True}
+
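A hedged sketch of how this helper might be called during user registration; the `adapter` instance and field values are assumptions, and `search_str`/`redis_escape_chars` appear to be accepted only for interface compatibility (they are unused here):

    async def ensure_new_user_is_unique(adapter) -> None:
        verdict = await adapter.check_uniqueness(
            {"email_unescaped": "a@b.c", "msisdn": "7777778110"},
            search_str="",            # unused by the SQL adapter
            redis_escape_chars="",    # unused by the SQL adapter
        )
        if not verdict["unique"]:
            print(f"duplicate value for field: {verdict['field']}")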
+ # async def ensure_authz_materialized_views_fresh(self) -> None:
+ #     try:
+ #         async with self.get_session() as session:
+ #             latest_q = text(
+ #                 """
+ #                 SELECT GREATEST(
+ #                     COALESCE((SELECT MAX(updated_at) FROM users), to_timestamp(0)),
+ #                     COALESCE((SELECT MAX(updated_at) FROM roles), to_timestamp(0)),
+ #                     COALESCE((SELECT MAX(updated_at) FROM permissions), to_timestamp(0))
+ #                 ) AS max_ts
+ #                 """
+ #             )
+ #             latest_ts_row = (await session.execute(latest_q)).one()
+ #             max_ts = latest_ts_row[0]
+ #
+ #             meta_row = (
+ #                 await session.execute(text("SELECT last_source_ts FROM authz_mv_meta WHERE id = 1"))).one_or_none()
+ #             if meta_row is None or (meta_row[0] is None) or (max_ts is not None and max_ts > meta_row[0]):
+ #                 await session.execute(text("REFRESH MATERIALIZED VIEW mv_user_roles"))
+ #                 await session.execute(text("REFRESH MATERIALIZED VIEW mv_role_permissions"))
+ #                 await session.execute(text("""
+ #                     INSERT INTO authz_mv_meta(id, last_source_ts, refreshed_at)
+ #                     VALUES (1, :ts, now())
+ #                     ON CONFLICT (id)
+ #                     DO UPDATE SET last_source_ts = EXCLUDED.last_source_ts,
+ #                                   refreshed_at = now()
+ #                 """), {"ts": max_ts})
+ #     except Exception as e:
+ #         logger.warning(f"AuthZ MV refresh failed or skipped: {e}")
+ #
+ # async def _bulk_load_by_shortnames(self, class_type: Type[MetaChild], shortnames: list[str]) -> dict[str, MetaChild]:
+ #     if not shortnames:
+ #         return {}
+ #     table = self.get_table(class_type)
+ #     items: dict[str, MetaChild] = {}
+ #     async with self.get_session() as session:
+ #         res = await session.execute(
+ #             select(table).where(col(table.shortname).in_(shortnames))
+ #         )
+ #         rows = [r[0] for r in res.all()]
+ #         for row in rows:
+ #             model_obj = class_type.model_validate(row.model_dump())
+ #             items[getattr(row, 'shortname')] = model_obj
+ #     return items
+
+ async def get_role_permissions(self, role: core.Role) -> list[core.Permission]:
+     role_records = await self.load_or_none(
+         settings.management_space, 'roles', role.shortname, core.Role
+     )
+     if role_records is None:
+         return []
+     role_permissions: list[core.Permission] = []
+     for permission in role_records.permissions:
+         permission_record = await self.load_or_none(
+             settings.management_space, 'permissions', permission, core.Permission
+         )
+         if permission_record is None:
+             continue
+         role_permissions.append(permission_record)
+     return role_permissions
+
+ async def get_user_roles(self, user_shortname: str) -> dict[str, core.Role]:
+     try:
+         user = await self.load_or_none(
+             settings.management_space, settings.users_subpath, user_shortname, core.User
+         )
+         if user is None:
+             return {}
+         euser_roles: dict[str, core.Role] = {}
+         if user_shortname != "anonymous":
+             role_record = await self.load_or_none(
+                 settings.management_space, 'roles', 'logged_in', core.Role
+             )
+             if role_record is not None:
+                 euser_roles['logged_in'] = role_record
+         for role in user.roles:
+             role_record = await self.load_or_none(
+                 settings.management_space, 'roles', role, core.Role
+             )
+             if role_record is None:
+                 continue
+             euser_roles[role] = role_record
+         return euser_roles
+     except Exception as e2:
+         print(f"Error: {e2}")
+         return {}
+
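Any non-anonymous user implicitly receives the `logged_in` role in addition to the roles listed on the user record. An illustrative expectation (shortnames invented, `adapter` instance assumed):

    async def show_roles(adapter) -> None:
        roles = await adapter.get_user_roles("alice")       # user record carries roles=["editor"]
        print(sorted(roles.keys()))                         # ['editor', 'logged_in']

        roles = await adapter.get_user_roles("anonymous")   # no implicit logged_in role
        print(sorted(roles.keys()))                         # only roles listed on the anonymous record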
+ async def load_user_meta(self, user_shortname: str) -> Any:
+     user = await self.load(
+         space_name=settings.management_space,
+         shortname=user_shortname,
+         subpath="users",
+         class_type=core.User,
+         user_shortname=user_shortname,
+     )
+
+     return user
+
+ async def generate_user_permissions(self, user_shortname: str) -> dict:
+     user_permissions: dict = {}
+
+     user_roles = await self.get_user_roles(user_shortname)
+
+     for _, role in user_roles.items():
+         role_permissions = await self.get_role_permissions(role)
+         if user_shortname == "anonymous":
+             permission_world_record = await self.load_or_none(
+                 settings.management_space, 'permissions', "world", core.Permission
+             )
+             if permission_world_record:
+                 role_permissions.append(permission_world_record)
+
+         for permission in role_permissions:
+             for space_name, permission_subpaths in permission.subpaths.items():
+                 for permission_subpath in permission_subpaths:
+                     permission_subpath = trans_magic_words(permission_subpath, user_shortname)
+                     for permission_resource_types in permission.resource_types:
+                         actions = set(permission.actions)
+                         conditions = set(permission.conditions)
+                         permission_key = f"{space_name}:{permission_subpath}:{permission_resource_types}"
+                         if permission_key in user_permissions:
+                             old_perm = user_permissions[permission_key]
+
+                             if isinstance(actions, list):
+                                 actions = set(actions)
+                             actions |= set(old_perm["allowed_actions"])
+
+                             if isinstance(conditions, list):
+                                 conditions = set(conditions)
+                             conditions |= set(old_perm["conditions"])
+
+                         user_permissions[permission_key] = {
+                             "allowed_actions": list(actions),
+                             "conditions": list(conditions),
+                             "restricted_fields": permission.restricted_fields,
+                             "allowed_fields_values": permission.allowed_fields_values,
+                             "filter_fields_values": permission.filter_fields_values
+                         }
+     return user_permissions
+
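The returned map is keyed by `space:subpath:resource_type`, and when several permissions hit the same key their actions and conditions are unioned. A sketch of one plausible entry (names and values invented):

    # Hypothetical shape of generate_user_permissions("alice")
    user_permissions = {
        "products:catalog:content": {
            "allowed_actions": ["view", "update"],
            "conditions": ["own", "is_active"],
            "restricted_fields": ["internal_notes"],
            "allowed_fields_values": {},
            "filter_fields_values": {},
        }
    }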
+ async def get_user_permissions(self, user_shortname: str) -> dict:
+     return await self.generate_user_permissions(user_shortname)
+
+ async def get_user_by_criteria(self, key: str, value: str) -> str | None:
+     _user = await self.get_entry_by_criteria(
+         {key: value},
+         Users
+     )
+     if _user is None:
+         return None
+     return str(_user.shortname)
+
+ async def get_payload_from_event(self, event) -> dict:
+     notification_request_meta = await self.load(
+         event.space_name,
+         event.subpath,
+         event.shortname,
+         getattr(sys_modules["models.core"], camel_case(event.resource_type)),
+         event.user_shortname,
+     )
+     return notification_request_meta.payload.body  # type: ignore
+
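The class passed to `self.load` is resolved dynamically from `models.core` by camel-casing the event's resource type. A stand-in sketch of that lookup (the project's real `camel_case` helper may differ, and `models.core` must already be imported for the module lookup to succeed):

    from sys import modules as sys_modules

    def resolve_core_class(resource_type: str):
        # e.g. "notification_request" -> models.core.NotificationRequest (class name assumed)
        camel = "".join(part.capitalize() for part in resource_type.split("_"))
        return getattr(sys_modules["models.core"], camel)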
+ async def get_user_roles_from_groups(self, user_meta: core.User) -> list:
+     return []
+
+ async def drop_index(self, space_name):
+     pass
+
+ async def initialize_spaces(self) -> None:
+     async with self.get_session() as session:
+         try:
+             (await session.execute(select(Spaces).limit(1))).one_or_none()
+         except Exception as e:
+             print(f"Error: {e}")
+             try:
+                 loop = asyncio.get_event_loop()
+                 loop.stop()
+             except RuntimeError as e:
+                 print(f"Error: {e}")
+
+ async def create_user_premission_index(self) -> None:
+     pass
+
+ async def store_modules_to_redis(self, roles, groups, permissions) -> None:
+     pass
+
+ async def delete_user_permissions_map_in_redis(self) -> None:
+     pass
+
+ async def internal_save_model(
+     self,
+     space_name: str,
+     subpath: str,
+     meta: core.Meta,
+     payload: dict | None = None
+ ):
+     await self.save(
+         space_name=space_name,
+         subpath=subpath,
+         meta=meta,
+     )
+
+ async def internal_sys_update_model(
+     self,
+     space_name: str,
+     subpath: str,
+     meta: core.Meta,
+     updates: dict,
+     sync_redis: bool = True,
+     payload_dict: dict[str, Any] = {},
+ ):
+     meta.updated_at = datetime.now()
+     meta_updated = False
+     payload_updated = False
+
+     if not payload_dict:
+         try:
+             if meta.payload and isinstance(meta.payload.body, dict):
+                 # Payload body is already loaded
+                 payload_dict = meta.payload.body
+
+             elif meta.payload and isinstance(meta.payload.body, str):
+                 # Payload body is the filename string
+                 mydict = await self.load_resource_payload(
+                     space_name, subpath, meta.payload.body, type(meta)
+                 )
+                 payload_dict = mydict if mydict else {}
+         except Exception:
+             pass
+
+     restricted_fields = [
+         "uuid",
+         "shortname",
+         "created_at",
+         "updated_at",
+         "owner_shortname",
+         "payload",
+     ]
+     old_version_flattend = {**meta.model_dump()}
+     for key, value in updates.items():
+         if key in restricted_fields:
+             continue
+
+         if key in meta.model_fields.keys():
+             meta_updated = True
+             meta.__setattr__(key, value)
+         elif payload_dict:
+             payload_dict[key] = value
+             payload_updated = True
+
+     if meta_updated:
+         await self.update(
+             space_name,
+             subpath,
+             meta,
+             old_version_flattend,
+             {**meta.model_dump()},
+             list(updates.keys()),
+             meta.shortname
+         )
+     if payload_updated and meta.payload and meta.payload.schema_shortname:
+         await self.validate_payload_with_schema(
+             payload_dict, space_name, meta.payload.schema_shortname
+         )
+         await self.save_payload_from_json(
+             space_name, subpath, meta, payload_dict
+         )
+
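Keys in `updates` that match meta-model fields update the meta record, anything else is folded into the JSON payload body, and a handful of system fields are never writable. A usage sketch (space, subpath and field names are assumptions):

    async def deactivate_and_zero_stock(adapter, meta) -> None:
        await adapter.internal_sys_update_model(
            space_name="products",
            subpath="catalog",
            meta=meta,
            updates={
                "is_active": False,    # meta field -> persisted via self.update(...)
                "stock_level": 0,      # not a meta field -> written into the payload body
                "shortname": "other",  # restricted field -> silently ignored
            },
        )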
+ async def get_entry_by_var(
+     self,
+     key: str,
+     val: str,
+     logged_in_user,
+     retrieve_json_payload: bool = False,
+     retrieve_attachments: bool = False,
+     retrieve_lock_status: bool = False,
+ ) -> core.Record:
+     _result = await self.get_entry_by_criteria({key: val})
+
+     if _result is None:
+         raise api.Exception(
+             status.HTTP_400_BAD_REQUEST,
+             error=api.Error(
+                 type="media", code=InternalErrorCode.OBJECT_NOT_FOUND, message="Request object is not available"
+             ),
+         )
+
+     from utils.access_control import access_control
+     if not await access_control.check_access(
+         user_shortname=logged_in_user,
+         space_name=_result.attributes['space_name'],
+         subpath=_result.subpath,
+         resource_type=_result.resource_type,
+         action_type=core.ActionType.view,
+         resource_is_active=_result.attributes['is_active'],
+         resource_owner_shortname=_result.attributes['owner_shortname'],
+         resource_owner_group=_result.attributes['owner_group_shortname'],
+         entry_shortname=_result.shortname
+     ):
+         raise api.Exception(
+             status.HTTP_401_UNAUTHORIZED,
+             api.Error(
+                 type="request",
+                 code=InternalErrorCode.NOT_ALLOWED,
+                 message="You don't have permission to this action [42]",
+             )
+         )
+
+     return _result
+
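Callers should expect two failure modes: a 400 when no entry matches the criterion and a 401 when the logged-in user fails the access-control check. A hedged usage sketch (`api` is the project's models.api module as used above; field name and caller are placeholders):

    async def find_by_msisdn(adapter, msisdn: str, logged_in_user: str):
        try:
            return await adapter.get_entry_by_var("msisdn", msisdn, logged_in_user)
        except api.Exception:
            # raised above as 400 OBJECT_NOT_FOUND or 401 NOT_ALLOWED
            return None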
+ async def delete_space(self, space_name, record, owner_shortname):
+     resource_obj = core.Meta.from_record(
+         record=record, owner_shortname=owner_shortname
+     )
+     await self.delete(space_name, record.subpath, resource_obj, owner_shortname)
+     os.system(f"rm -r {settings.spaces_folder}/{space_name}")
+
+ async def get_last_updated_entry(
+     self,
+     space_name: str,
+     schema_names: list,
+     retrieve_json_payload: bool,
+     logged_in_user: str,
+ ):
+     pass
+
+ async def get_group_users(self, group_name: str):
+     async with self.get_session() as session:
+         statement = select(Users.shortname).where(col(Users.groups).contains([group_name]))
+         result = await session.execute(statement)
+         shortnames = result.scalars().all()
+         return shortnames
+
+ async def is_user_verified(self, user_shortname: str | None, identifier: str | None) -> bool:
+     async with self.get_session() as session:
+         statement = select(Users).where(Users.shortname == user_shortname)
+         result = (await session.execute(statement)).one_or_none()
+
+         if result is None:
+             return False
+         user = Users.model_validate(result[0])
+
+         if identifier == "msisdn":
+             return user.is_msisdn_verified
+         if identifier == "email":
+             return user.is_email_verified
+         return False