dmart 1.4.17__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alembic.ini +117 -0
- api/__init__.py +0 -0
- api/info/__init__.py +0 -0
- api/info/router.py +109 -0
- api/managed/__init__.py +0 -0
- api/managed/router.py +1541 -0
- api/managed/utils.py +1879 -0
- api/public/__init__.py +0 -0
- api/public/router.py +758 -0
- api/qr/__init__.py +0 -0
- api/qr/router.py +108 -0
- api/user/__init__.py +0 -0
- api/user/model/__init__.py +0 -0
- api/user/model/errors.py +14 -0
- api/user/model/requests.py +165 -0
- api/user/model/responses.py +11 -0
- api/user/router.py +1413 -0
- api/user/service.py +270 -0
- bundler.py +55 -0
- config/__init__.py +0 -0
- config/channels.json +11 -0
- config/notification.json +17 -0
- cxb/__init__.py +0 -0
- cxb/client/__init__.py +0 -0
- cxb/client/assets/@codemirror-Rn7_6DkE.js +10 -0
- cxb/client/assets/@edraj-CS4NwVbD.js +1 -0
- cxb/client/assets/@floating-ui-BwwcF-xh.js +1 -0
- cxb/client/assets/@formatjs-yKEsAtjs.js +1 -0
- cxb/client/assets/@fortawesome-DRW1UCdr.js +9 -0
- cxb/client/assets/@jsonquerylang-laKNoFFq.js +12 -0
- cxb/client/assets/@lezer-za4Q-8Ew.js +1 -0
- cxb/client/assets/@marijn-DXwl3gUT.js +1 -0
- cxb/client/assets/@popperjs-l0sNRNKZ.js +1 -0
- cxb/client/assets/@replit--ERk53eB.js +1 -0
- cxb/client/assets/@roxi-CGMFK4i8.js +6 -0
- cxb/client/assets/@typewriter-cCzskkIv.js +17 -0
- cxb/client/assets/@zerodevx-BlBZjKxu.js +1 -0
- cxb/client/assets/@zerodevx-CVEpe6WZ.css +1 -0
- cxb/client/assets/BreadCrumbLite-DAhOx38v.js +1 -0
- cxb/client/assets/EntryRenderer-25YDhRen.js +32 -0
- cxb/client/assets/EntryRenderer-DXytdFp9.css +1 -0
- cxb/client/assets/ListView-BpAycA2h.js +16 -0
- cxb/client/assets/ListView-U8of-_c-.css +1 -0
- cxb/client/assets/Prism--hMplq-p.js +3 -0
- cxb/client/assets/Prism-Uh6uStUw.css +1 -0
- cxb/client/assets/Table2Cols-BsbwicQm.js +1 -0
- cxb/client/assets/_..-BvT6vdHa.css +1 -0
- cxb/client/assets/_...404_-fuLH_rX9.js +2 -0
- cxb/client/assets/_...fallback_-Ba_NLmAE.js +1 -0
- cxb/client/assets/_module-Bfk8MiCs.js +3 -0
- cxb/client/assets/_module-CEW0D5oI.js +4 -0
- cxb/client/assets/_module-Dgq0ZVtz.js +1 -0
- cxb/client/assets/ajv-Cpj98o6Y.js +1 -0
- cxb/client/assets/axios-CG2WSiiR.js +6 -0
- cxb/client/assets/clsx-B-dksMZM.js +1 -0
- cxb/client/assets/codemirror-wrapped-line-indent-DPhKvljI.js +1 -0
- cxb/client/assets/compare-C3AjiGFR.js +1 -0
- cxb/client/assets/compute-scroll-into-view-Bl8rNFhg.js +1 -0
- cxb/client/assets/consolite-DlCuI0F9.js +1 -0
- cxb/client/assets/crelt-C8TCjufn.js +1 -0
- cxb/client/assets/date-fns-l0sNRNKZ.js +1 -0
- cxb/client/assets/deepmerge-rn4rBaHU.js +1 -0
- cxb/client/assets/dmart_services-AL6-IdDE.js +1 -0
- cxb/client/assets/downloadFile-D08i0YDh.js +1 -0
- cxb/client/assets/easy-signal-BiPFIK3O.js +1 -0
- cxb/client/assets/esm-env-rsSWfq8L.js +1 -0
- cxb/client/assets/export-OF_rTiXu.js +1 -0
- cxb/client/assets/fast-deep-equal-l0sNRNKZ.js +1 -0
- cxb/client/assets/fast-diff-C-IidNf4.js +1 -0
- cxb/client/assets/fast-uri-l0sNRNKZ.js +1 -0
- cxb/client/assets/flowbite-svelte-BLvjb-sa.js +1 -0
- cxb/client/assets/flowbite-svelte-CD54FDqW.css +1 -0
- cxb/client/assets/flowbite-svelte-icons-BI8GVhw_.js +1 -0
- cxb/client/assets/github-slugger-CQ4oX9Ud.js +1 -0
- cxb/client/assets/global-igKv-1g9.js +1 -0
- cxb/client/assets/hookar-BMRD9G9H.js +1 -0
- cxb/client/assets/immutable-json-patch-DtRO2E_S.js +1 -0
- cxb/client/assets/import-1vE3gBat.js +1 -0
- cxb/client/assets/index-B-eTh-ZX.js +1 -0
- cxb/client/assets/index-BVyxzKtH.js +1 -0
- cxb/client/assets/index-BdeNM69f.js +1 -0
- cxb/client/assets/index-C6cPO4op.js +1 -0
- cxb/client/assets/index-CC-A1ipE.js +1 -0
- cxb/client/assets/index-CTxJ-lDp.js +1 -0
- cxb/client/assets/index-Cd-F5j_k.js +1 -0
- cxb/client/assets/index-D742rwaM.js +1 -0
- cxb/client/assets/index-DTfhnhwd.js +1 -0
- cxb/client/assets/index-DdXRK7n9.js +2 -0
- cxb/client/assets/index-DtiCmB4o.js +1 -0
- cxb/client/assets/index-NBrXBlLA.css +2 -0
- cxb/client/assets/index-ac-Buu_H.js +4 -0
- cxb/client/assets/index-iYkH7C67.js +1 -0
- cxb/client/assets/info-B986lRiM.js +1 -0
- cxb/client/assets/intl-messageformat-Dc5UU-HB.js +3 -0
- cxb/client/assets/jmespath-l0sNRNKZ.js +1 -0
- cxb/client/assets/json-schema-traverse-l0sNRNKZ.js +1 -0
- cxb/client/assets/json-source-map-DRgZidqy.js +5 -0
- cxb/client/assets/jsonpath-plus-l0sNRNKZ.js +1 -0
- cxb/client/assets/jsonrepair-B30Dx381.js +8 -0
- cxb/client/assets/lodash-es-DZVAA2ox.js +1 -0
- cxb/client/assets/marked-DKjyhwJX.js +56 -0
- cxb/client/assets/marked-gfm-heading-id-U5zO829x.js +2 -0
- cxb/client/assets/marked-mangle-CDMeiHC6.js +1 -0
- cxb/client/assets/memoize-one-BdPwpGay.js +1 -0
- cxb/client/assets/natural-compare-lite-Bg2Xcf-o.js +7 -0
- cxb/client/assets/pagination-svelte-D5CyoiE_.js +13 -0
- cxb/client/assets/pagination-svelte-v10nAbbM.css +1 -0
- cxb/client/assets/plantuml-encoder-C47mzt9T.js +1 -0
- cxb/client/assets/prismjs-DTUiLGJu.js +9 -0
- cxb/client/assets/profile-BUf-tKMe.js +1 -0
- cxb/client/assets/query-CNmXTsgf.js +1 -0
- cxb/client/assets/queryHelpers-C9iBWwqe.js +1 -0
- cxb/client/assets/scroll-into-view-if-needed-KR58zyjF.js +1 -0
- cxb/client/assets/spaces-0oyGvpii.js +1 -0
- cxb/client/assets/style-mod-Bs6eFhZE.js +3 -0
- cxb/client/assets/svelte-B2XmcTi_.js +4 -0
- cxb/client/assets/svelte-awesome-COLlx0DN.css +1 -0
- cxb/client/assets/svelte-awesome-DhnMA6Q_.js +1 -0
- cxb/client/assets/svelte-datatables-net-CY7LBj6I.js +1 -0
- cxb/client/assets/svelte-floating-ui-BlS3sOAQ.js +1 -0
- cxb/client/assets/svelte-i18n-CT2KkQaN.js +3 -0
- cxb/client/assets/svelte-jsoneditor-BzfX6Usi.css +1 -0
- cxb/client/assets/svelte-jsoneditor-CUGSvWId.js +25 -0
- cxb/client/assets/svelte-select-CegQKzqH.css +1 -0
- cxb/client/assets/svelte-select-CjHAt_85.js +6 -0
- cxb/client/assets/tailwind-merge-CJvxXMcu.js +1 -0
- cxb/client/assets/tailwind-variants-Cj20BoQ3.js +1 -0
- cxb/client/assets/toast-B9WDyfyI.js +1 -0
- cxb/client/assets/tslib-pJfR_DrR.js +1 -0
- cxb/client/assets/typewriter-editor-DkTVIJdm.js +25 -0
- cxb/client/assets/user-DeK_NB5v.js +1 -0
- cxb/client/assets/vanilla-picker-l5rcX3cq.js +8 -0
- cxb/client/assets/w3c-keyname-Vcq4gwWv.js +1 -0
- cxb/client/config.json +11 -0
- cxb/client/config.sample.json +11 -0
- cxb/client/favicon.ico +0 -0
- cxb/client/favicon.png +0 -0
- cxb/client/index.html +28 -0
- data_adapters/__init__.py +0 -0
- data_adapters/adapter.py +16 -0
- data_adapters/base_data_adapter.py +467 -0
- data_adapters/file/__init__.py +0 -0
- data_adapters/file/adapter.py +2043 -0
- data_adapters/file/adapter_helpers.py +1013 -0
- data_adapters/file/archive.py +150 -0
- data_adapters/file/create_index.py +331 -0
- data_adapters/file/create_users_folders.py +52 -0
- data_adapters/file/custom_validations.py +68 -0
- data_adapters/file/drop_index.py +40 -0
- data_adapters/file/health_check.py +560 -0
- data_adapters/file/redis_services.py +1110 -0
- data_adapters/helpers.py +27 -0
- data_adapters/sql/__init__.py +0 -0
- data_adapters/sql/adapter.py +3218 -0
- data_adapters/sql/adapter_helpers.py +491 -0
- data_adapters/sql/create_tables.py +451 -0
- data_adapters/sql/create_users_folders.py +53 -0
- data_adapters/sql/db_to_json_migration.py +485 -0
- data_adapters/sql/health_check_sql.py +232 -0
- data_adapters/sql/json_to_db_migration.py +454 -0
- data_adapters/sql/update_query_policies.py +101 -0
- data_generator.py +81 -0
- dmart-1.4.17.dist-info/METADATA +65 -0
- dmart-1.4.17.dist-info/RECORD +289 -0
- dmart-1.4.17.dist-info/WHEEL +5 -0
- dmart-1.4.17.dist-info/entry_points.txt +2 -0
- dmart-1.4.17.dist-info/top_level.txt +24 -0
- dmart.py +623 -0
- dmart_migrations/README +1 -0
- dmart_migrations/__init__.py +0 -0
- dmart_migrations/__pycache__/__init__.cpython-314.pyc +0 -0
- dmart_migrations/__pycache__/env.cpython-314.pyc +0 -0
- dmart_migrations/env.py +100 -0
- dmart_migrations/notes.txt +11 -0
- dmart_migrations/script.py.mako +28 -0
- dmart_migrations/scripts/__init__.py +0 -0
- dmart_migrations/scripts/calculate_checksums.py +77 -0
- dmart_migrations/scripts/migration_f7a4949eed19.py +28 -0
- dmart_migrations/versions/0f3d2b1a7c21_add_authz_materialized_views.py +87 -0
- dmart_migrations/versions/10d2041b94d4_last_checksum_history.py +62 -0
- dmart_migrations/versions/1cf4e1ee3cb8_ext_permission_with_filter_fields_values.py +33 -0
- dmart_migrations/versions/26bfe19b49d4_rm_failedloginattempts.py +42 -0
- dmart_migrations/versions/3c8bca2219cc_add_otp_table.py +38 -0
- dmart_migrations/versions/6675fd9dfe42_remove_unique_from_sessions_table.py +36 -0
- dmart_migrations/versions/71bc1df82e6a_adding_user_last_login_at.py +43 -0
- dmart_migrations/versions/74288ccbd3b5_initial.py +264 -0
- dmart_migrations/versions/7520a89a8467_rm_activesession_table.py +39 -0
- dmart_migrations/versions/848b623755a4_make_created_nd_updated_at_required.py +138 -0
- dmart_migrations/versions/8640dcbebf85_add_notes_to_users.py +32 -0
- dmart_migrations/versions/91c94250232a_adding_fk_on_owner_shortname.py +104 -0
- dmart_migrations/versions/98ecd6f56f9a_ext_meta_with_owner_group_shortname.py +66 -0
- dmart_migrations/versions/9aae9138c4ef_indexing_created_at_updated_at.py +80 -0
- dmart_migrations/versions/__init__.py +0 -0
- dmart_migrations/versions/__pycache__/0f3d2b1a7c21_add_authz_materialized_views.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/10d2041b94d4_last_checksum_history.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/1cf4e1ee3cb8_ext_permission_with_filter_fields_values.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/26bfe19b49d4_rm_failedloginattempts.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/3c8bca2219cc_add_otp_table.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/6675fd9dfe42_remove_unique_from_sessions_table.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/71bc1df82e6a_adding_user_last_login_at.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/74288ccbd3b5_initial.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/7520a89a8467_rm_activesession_table.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/848b623755a4_make_created_nd_updated_at_required.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/8640dcbebf85_add_notes_to_users.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/91c94250232a_adding_fk_on_owner_shortname.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/98ecd6f56f9a_ext_meta_with_owner_group_shortname.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/9aae9138c4ef_indexing_created_at_updated_at.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/b53f916b3f6d_json_to_jsonb.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/eb5f1ec65156_adding_user_locked_to_device.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/f7a4949eed19_adding_query_policies_to_meta.cpython-314.pyc +0 -0
- dmart_migrations/versions/b53f916b3f6d_json_to_jsonb.py +492 -0
- dmart_migrations/versions/eb5f1ec65156_adding_user_locked_to_device.py +36 -0
- dmart_migrations/versions/f7a4949eed19_adding_query_policies_to_meta.py +60 -0
- get_settings.py +7 -0
- info.json +1 -0
- languages/__init__.py +0 -0
- languages/arabic.json +15 -0
- languages/english.json +16 -0
- languages/kurdish.json +14 -0
- languages/loader.py +12 -0
- main.py +560 -0
- migrate.py +24 -0
- models/__init__.py +0 -0
- models/api.py +203 -0
- models/core.py +597 -0
- models/enums.py +255 -0
- password_gen.py +8 -0
- plugins/__init__.py +0 -0
- plugins/action_log/__init__.py +0 -0
- plugins/action_log/plugin.py +121 -0
- plugins/admin_notification_sender/__init__.py +0 -0
- plugins/admin_notification_sender/plugin.py +124 -0
- plugins/ldap_manager/__init__.py +0 -0
- plugins/ldap_manager/plugin.py +100 -0
- plugins/local_notification/__init__.py +0 -0
- plugins/local_notification/plugin.py +123 -0
- plugins/realtime_updates_notifier/__init__.py +0 -0
- plugins/realtime_updates_notifier/plugin.py +58 -0
- plugins/redis_db_update/__init__.py +0 -0
- plugins/redis_db_update/plugin.py +188 -0
- plugins/resource_folders_creation/__init__.py +0 -0
- plugins/resource_folders_creation/plugin.py +81 -0
- plugins/system_notification_sender/__init__.py +0 -0
- plugins/system_notification_sender/plugin.py +188 -0
- plugins/update_access_controls/__init__.py +0 -0
- plugins/update_access_controls/plugin.py +9 -0
- pytests/__init__.py +0 -0
- pytests/api_user_models_erros_test.py +16 -0
- pytests/api_user_models_requests_test.py +98 -0
- pytests/archive_test.py +72 -0
- pytests/base_test.py +300 -0
- pytests/get_settings_test.py +14 -0
- pytests/json_to_db_migration_test.py +237 -0
- pytests/service_test.py +26 -0
- pytests/test_info.py +55 -0
- pytests/test_status.py +15 -0
- run_notification_campaign.py +85 -0
- scheduled_notification_handler.py +121 -0
- schema_migration.py +208 -0
- schema_modulate.py +192 -0
- set_admin_passwd.py +55 -0
- sync.py +202 -0
- utils/__init__.py +0 -0
- utils/access_control.py +306 -0
- utils/async_request.py +8 -0
- utils/exporter.py +309 -0
- utils/firebase_notifier.py +57 -0
- utils/generate_email.py +37 -0
- utils/helpers.py +352 -0
- utils/hypercorn_config.py +12 -0
- utils/internal_error_code.py +60 -0
- utils/jwt.py +124 -0
- utils/logger.py +167 -0
- utils/middleware.py +99 -0
- utils/notification.py +75 -0
- utils/password_hashing.py +16 -0
- utils/plugin_manager.py +202 -0
- utils/query_policies_helper.py +128 -0
- utils/regex.py +44 -0
- utils/repository.py +529 -0
- utils/router_helper.py +19 -0
- utils/settings.py +166 -0
- utils/sms_notifier.py +21 -0
- utils/social_sso.py +67 -0
- utils/templates/activation.html.j2 +26 -0
- utils/templates/reminder.html.j2 +17 -0
- utils/ticket_sys_utils.py +203 -0
- utils/web_notifier.py +29 -0
- websocket.py +231 -0
|
@@ -0,0 +1,1110 @@
|
|
|
1
|
+
import re
|
|
2
|
+
import json
|
|
3
|
+
import sys
|
|
4
|
+
from typing import Any, Awaitable
|
|
5
|
+
from redis.asyncio import Redis
|
|
6
|
+
from redis.asyncio.connection import BlockingConnectionPool
|
|
7
|
+
from models.api import RedisReducer, SortType
|
|
8
|
+
import models.core as core
|
|
9
|
+
from models.enums import ActionType, RedisReducerName, ResourceType, LockAction
|
|
10
|
+
from redis.commands.json.path import Path
|
|
11
|
+
from redis.commands.search.field import TextField, NumericField, TagField, Field
|
|
12
|
+
from redis.commands.search.index_definition import IndexDefinition, IndexType
|
|
13
|
+
from datetime import datetime
|
|
14
|
+
|
|
15
|
+
from redis.commands.search import Search, aggregation
|
|
16
|
+
from redis.commands.search.query import Query
|
|
17
|
+
from utils.helpers import camel_case, resolve_schema_references
|
|
18
|
+
from utils.internal_error_code import InternalErrorCode
|
|
19
|
+
from utils.query_policies_helper import generate_query_policies
|
|
20
|
+
from utils.settings import settings
|
|
21
|
+
import models.api as api
|
|
22
|
+
from fastapi import status
|
|
23
|
+
from fastapi.logger import logger
|
|
24
|
+
import redis
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class RedisServices(Redis):
    """Singleton async Redis client that also manages RediSearch indices.

    Extends ``redis.asyncio.Redis`` with helpers to build, create, and drop
    per-space JSON search indices derived either from JSON-schema files or
    from the pydantic model classes in ``models.core``.
    """

    # RediSearch field definitions shared by every space's "meta" index.
    # Each entry maps a JSONPath inside the stored document to an indexed
    # alias (``as_name``) usable in FT.SEARCH queries.
    META_SCHEMA : list[Field] = [
        TextField("$.uuid", no_stem=True, as_name="uuid"), # type: ignore
        TextField("$.shortname", sortable=True, no_stem=True, as_name="shortname"), # type: ignore
        TextField("$.slug", sortable=True, no_stem=True, as_name="slug"), # type: ignore
        TextField("$.subpath", sortable=True, no_stem=True, as_name="subpath"), # type: ignore
        # Same path indexed twice: TextField for fuzzy search, TagField for
        # exact-match filtering.
        TagField("$.subpath", as_name="exact_subpath"), # type: ignore
        TextField(
            "$.resource_type",
            sortable=True,
            no_stem=True,
            as_name="resource_type",
        ), # type: ignore
        TextField("$.displayname.en", sortable=True, as_name="displayname_en"), # type: ignore
        TextField("$.displayname.ar", sortable=True, as_name="displayname_ar"), # type: ignore
        # NOTE(review): the ".ku" (Kurdish) paths are aliased as "_kd" --
        # presumably a historical naming choice; confirm queries use "_kd".
        TextField("$.displayname.ku", sortable=True, as_name="displayname_kd"), # type: ignore
        TextField("$.description.en", sortable=True, as_name="description_en"), # type: ignore
        TextField("$.description.ar", sortable=True, as_name="description_ar"), # type: ignore
        TextField("$.description.ku", sortable=True, as_name="description_kd"), # type: ignore
        TagField("$.is_active", as_name="is_active"), # type: ignore
        TextField(
            "$.payload.content_type",
            no_stem=True,
            as_name="payload_content_type",
        ), # type: ignore
        TextField(
            "$.payload.schema_shortname",
            no_stem=True,
            as_name="schema_shortname",
        ), # type: ignore
        # Timestamps are stored as numbers so they can be range-queried.
        NumericField("$.created_at", sortable=True, as_name="created_at"), # type: ignore
        NumericField("$.updated_at", sortable=True, as_name="updated_at"), # type: ignore
        TagField("$.view_acl.*", as_name="view_acl"), # type: ignore
        TagField("$.tags.*", as_name="tags"), # type: ignore
        TextField(
            "$.owner_shortname",
            sortable=True,
            no_stem=True,
            as_name="owner_shortname",
        ), # type: ignore
        TagField("$.query_policies.*", as_name="query_policies"), # type: ignore
        # User fields
        TextField("$.msisdn", sortable=True, as_name="msisdn"), # type: ignore
        TextField("$.email", sortable=True, as_name="email"), # type: ignore
        TagField("$.email", as_name="email_unescaped"), # type: ignore
        # Ticket fields
        TextField("$.state", sortable=True, no_stem=True, as_name="state"), # type: ignore
        TagField("$.is_open", as_name="is_open"), # type: ignore
        TextField(
            "$.workflow_shortname",
            sortable=True,
            no_stem=True,
            as_name="workflow_shortname",
        ), # type: ignore
        TextField(
            "$.collaborators.delivered_by",
            sortable=True,
            no_stem=True,
            as_name="collaborators_delivered_by",
        ), # type: ignore
        TextField(
            "$.collaborators.processed_by",
            sortable=True,
            no_stem=True,
            as_name="collaborators_processed_by",
        ), # type: ignore
        TextField(
            "$.resolution_reason",
            sortable=True,
            no_stem=True,
            as_name="resolution_reason",
        ), # type: ignore
        # Notification fields
        TextField("$.type", sortable=True, no_stem=True, as_name="type"), # type: ignore
        TagField("$.is_read", as_name="is_read"), # type: ignore
        TextField("$.priority", sortable=True, no_stem=True, as_name="priority"), # type: ignore
        TextField("$.reporter.type", sortable=True, as_name="reporter_type"), # type: ignore
        TextField("$.reporter.name", sortable=True, as_name="reporter_name"), # type: ignore
        TextField("$.reporter.channel", sortable=True, as_name="reporter_channel"), # type: ignore
        TextField(
            "$.reporter.distributor",
            sortable=True,
            as_name="reporter_distributor",
        ), # type: ignore
        TextField(
            "$.reporter.governorate",
            sortable=True,
            as_name="reporter_governorate",
        ), # type: ignore
        TextField(
            "$.reporter.msisdn",
            sortable=True,
            as_name="reporter_msisdn",
        ), # type: ignore
        # Flattened full-text dump of the payload; too large to be sortable.
        TextField(
            "$.payload_string",
            sortable=False,
            as_name="payload_string",
        ), # type: ignore
    ] # type: ignore
|
|
127
|
+
|
|
128
|
+
# Model classes that receive bespoke indices. Order MUST stay aligned with
# CUSTOM_INDICES below: create_custom_indices pairs them positionally
# (CUSTOM_CLASSES[i] <-> CUSTOM_INDICES[i]).
CUSTOM_CLASSES: list[type[core.Meta]] = [
    core.Role,
    core.Group,
    core.User,
    core.Permission,
]
|
|
134
|
+
|
|
135
|
+
# Meta attributes excluded from every custom index below; kept in one place
# so the four entries cannot drift apart (previously copy-pasted 4 times).
_COMMON_EXCLUDED_FIELDS: list = [
    "relationships",
    "acl",
    "is_active",
    "description",
    "displayname",
    "payload",
]

# Per-subpath custom index specs for the "management" space. Order MUST stay
# aligned with CUSTOM_CLASSES above (matched positionally by index in
# create_custom_indices).
CUSTOM_INDICES = [
    {
        "space": "management",
        "subpath": "roles",
        "class": core.Role,
        "exclude_from_index": [*_COMMON_EXCLUDED_FIELDS],
    },
    {
        "space": "management",
        "subpath": "groups",
        "class": core.Group,
        "exclude_from_index": [*_COMMON_EXCLUDED_FIELDS],
    },
    {
        "space": "management",
        "subpath": "users",
        "class": core.User,
        # Also keep credentials/verification flags out of the search index.
        "exclude_from_index": [
            *_COMMON_EXCLUDED_FIELDS,
            "password",
            "is_email_verified",
            "is_msisdn_verified",
            "type",
            "force_password_change",
            "social_avatar_url",
        ],
    },
    {
        "space": "management",
        "subpath": "permissions",
        "class": core.Permission,
        "exclude_from_index": [
            *_COMMON_EXCLUDED_FIELDS,
            "subpaths",
            "resource_types",
            "actions",
            "conditions",
            "restricted_fields",
            "allowed_fields_values",
        ],
    },
]
|
|
201
|
+
|
|
202
|
+
# Redis-only bookkeeping attributes injected into documents at indexing
# time. Fix: "payload_string" was listed twice; the duplicate is removed
# (membership semantics are unchanged).
# NOTE(review): presumably consumers strip these before returning documents
# to callers -- confirm against usage sites.
SYS_ATTRIBUTES = [
    "payload_string",
    "query_policies",
    "subpath",
    "resource_type",
    "meta_doc_id",
    "payload_doc_id",
    "view_acl",
]
|
|
212
|
+
# Cache of RediSearch handles: space_name -> schema_name -> Search object.
# NOTE: class-level mutable dict -- shared across all instances (the class
# is a singleton via __new__, so this is by design).
redis_indices: dict[str, dict[str, Search]] = {}
# Shared blocking connection pool, built once at class-definition time from
# application settings. protocol=3 selects RESP3; decode_responses returns
# str instead of bytes.
POOL: BlockingConnectionPool = BlockingConnectionPool(
    timeout=10,
    host=settings.redis_host,
    port=settings.redis_port,
    password=settings.redis_password,
    protocol=3,
    max_connections=settings.redis_pool_max_connections,
    decode_responses=True)
|
|
221
|
+
|
|
222
|
+
def __new__(cls):
    """Return the process-wide singleton instance, creating it on first use."""
    try:
        return cls.instance
    except AttributeError:
        cls.instance = super(RedisServices, cls).__new__(cls)
        return cls.instance
|
|
226
|
+
|
|
227
|
+
def __init__(self):
    """Bind this client to the shared blocking connection pool.

    If the pool raises a ConnectionError here, the whole process exits
    with code 127 -- the service cannot operate without Redis.
    """
    try:
        super().__init__(connection_pool=RedisServices.POOL)
    except redis.exceptions.ConnectionError as e: # type: ignore
        print("[!FATAL]", e)
        sys.exit(127)
|
|
233
|
+
|
|
234
|
+
async def close_pool(self):
    """Close this client, then drain and disconnect the shared pool."""
    pool = RedisServices.POOL
    await self.aclose()
    await pool.aclose()
    # Force-disconnect connections that are still checked out.
    await pool.disconnect(True)
|
|
239
|
+
|
|
240
|
+
async def create_index(
    self,
    space_name: str,
    schema_name: str,
    redis_schema: list[Field],
    del_docs: bool = True,
):
    """Create the RediSearch index ``space_name:schema_name``, dropping any
    existing index of the same name first.

    :param space_name: space the index belongs to.
    :param schema_name: schema shortname ("meta" or a JSON-schema name).
    :param redis_schema: field definitions for the index.
    :param del_docs: when dropping a pre-existing index, also delete its
        indexed documents.
    """
    try:
        await self.redis_indices[space_name][schema_name].dropindex(
            delete_documents=del_docs
        )
    # Deliberate best-effort: the index may simply not exist yet (or the
    # handle may be missing from redis_indices) -- either way we proceed
    # to (re)create it.
    except Exception as _:
        pass
        # logger.error(f"Error at redis_services.create_index: {e}")

    # Index JSON documents whose keys start with either prefix form.
    await self.redis_indices[space_name][schema_name].create_index(
        redis_schema,
        definition=IndexDefinition(
            prefix=[
                f"{space_name}:{schema_name}:",
                f"{space_name}:{schema_name}/",
            ],
            index_type=IndexType.JSON,
        ),
    )
    # print(f"Created new index named {space_name}:{schema_name}\n")
|
|
269
|
+
|
|
270
|
+
def get_redis_index_fields(self, key_chain, property, redis_schema_definition):
    """Recursively translate one JSON-schema property into RediSearch fields.

    ``key_chain`` is the dotted JSON path accumulated so far. Generated
    fields are appended to ``redis_schema_definition``, which is returned.
    """
    json_type_to_field = {
        "string": TextField,
        "boolean": TagField,
        "integer": NumericField,
        "number": NumericField,
        "array": TagField,
    }

    # Non-dict schema nodes and dangling paths contribute nothing.
    if not isinstance(property, dict) or key_chain.endswith("."):
        return redis_schema_definition

    if "type" in property and property["type"] != "object":
        # Unindexable leaf types ("null" or malformed) are skipped.
        if property["type"] == "null" or not isinstance(
            property["type"], str
        ):
            return redis_schema_definition

        field_alias = key_chain.replace(".", "_")
        is_sortable = True

        # Array of objects: index each string sub-property as an exact-match tag.
        if (
            property["type"] == "array"
            and property.get("items", {}).get("type", None) == "object"
            and "items" in property
            and "properties" in property["items"]
        ):
            redis_schema_definition.extend(
                TagField(
                    f"$.{key_chain}.*.{sub_key}",
                    as_name=f"{key_chain}_{sub_key}",
                )
                for sub_key, sub_property in property["items"][
                    "properties"
                ].items()
                if sub_property["type"] == "string"
            )
            return redis_schema_definition

        # Plain array: index its elements; element fields cannot be sortable.
        if property["type"] == "array":
            key_chain += ".*"
            is_sortable = False

        field_class = json_type_to_field[property["type"]]
        redis_schema_definition.append(
            field_class(f"$.{key_chain}", sortable=is_sortable, as_name=field_alias)
        )
        return redis_schema_definition

    # Union node: merge the fields of every variant under the same path.
    if "oneOf" in property:
        for variant in property["oneOf"]:
            redis_schema_definition = self.get_redis_index_fields(
                key_chain, variant, redis_schema_definition
            )
        return redis_schema_definition

    if "properties" not in property:
        return redis_schema_definition

    # Object node: recurse into each sub-property, extending the path.
    for sub_key, sub_property in property["properties"].items():
        redis_schema_definition = self.get_redis_index_fields(
            f"{key_chain}.{sub_key}", sub_property, redis_schema_definition
        )

    return redis_schema_definition
|
|
339
|
+
|
|
340
|
+
def generate_redis_index_from_class(
    self, class_ref: type[core.Meta], exclude_from_index: list
) -> list[Field]:
    """Derive RediSearch fields from a pydantic model's declared attributes.

    Each attribute's annotation is matched by string prefix (e.g. "str",
    "list[...]") against a fixed mapping; attributes that are excluded or
    whose annotation matches no prefix are skipped.
    """
    annotation_prefix_to_field = {
        "str": TextField,
        "bool": TextField,
        "UUID": TextField,
        "list": TagField,
        "datetime": NumericField,
        "set": TagField,
        "dict": TextField,
    }

    schema_fields: list[Field] = []
    for attr_name, model_field in class_ref.model_fields.items():
        if attr_name in exclude_from_index:
            continue

        annotation_text = str(model_field.annotation)
        matched_prefix = next(
            (
                prefix
                for prefix in annotation_prefix_to_field
                if annotation_text.startswith(prefix)
            ),
            None,
        )
        if not matched_prefix:
            continue

        field_class = annotation_prefix_to_field[matched_prefix]
        # Tag fields index a collection's elements, hence the trailing ".*".
        json_path = (
            f"$.{attr_name}.*" if field_class == TagField else f"$.{attr_name}"
        )
        schema_fields.append(field_class(json_path, as_name=attr_name))

    return schema_fields
|
|
376
|
+
|
|
377
|
+
async def create_custom_indices(self, for_space: str | None = None):
    """Build the hand-tuned "meta" indices declared in CUSTOM_INDICES.

    Fields are generated from the positionally-paired CUSTOM_CLASSES entry,
    merged with META_SCHEMA, and created as ``<space>:meta``.

    :param for_space: when given, only that space's custom indices are built.
    """
    redis_schemas: dict[str, list] = {}
    for i, index in enumerate(self.CUSTOM_INDICES):
        # Python precedence: (for_space and mismatch) OR (bad exclude list).
        if (
            for_space
            and index["space"] != for_space
            or not isinstance(index["exclude_from_index"], list)
        ):
            continue

        exclude_from_index: list = index["exclude_from_index"]

        redis_schemas.setdefault(f"{index['space']}", [])
        # NOTE(review): this seeds redis_indices with the flat key
        # "<space>:meta", while create_indices/create_index use the nested
        # shape redis_indices[<space>]["meta"] -- confirm this setdefault
        # actually has the intended effect when create_indices hasn't run.
        self.redis_indices.setdefault(
            f"{index['space']}:meta", {}
        )

        # Derive fields from the model class paired positionally with this
        # index spec (CUSTOM_CLASSES[i]).
        generated_schema_fields : list[Field] = self.generate_redis_index_from_class(
            self.CUSTOM_CLASSES[i], exclude_from_index
        )

        # Accumulate per-space fields, de-duplicated by alias.
        redis_schemas[f"{index['space']}"] = (
            self.append_unique_index_fields(
                generated_schema_fields,
                redis_schemas[f"{index['space']}"],
            )
        )

    for space_name, redis_schema in redis_schemas.items():
        # Merge in the common META_SCHEMA fields, then (re)create the index.
        redis_schema = self.append_unique_index_fields(
            redis_schema,
            self.META_SCHEMA,
        )
        await self.create_index(
            f"{space_name}",
            "meta",
            redis_schema,
        )
|
|
415
|
+
|
|
416
|
+
async def create_indices(
    self,
    for_space: str | None = None,
    for_schemas: list | None = None,
    for_custom_indices: bool = True,
    del_docs: bool = True,
):
    """Create RediSearch indices for every space with indexing enabled.

    For each qualifying space this creates:
      1. a "meta" index named ``<space>:meta`` from META_SCHEMA, and
      2. one index per JSON-schema file, named ``<space>:<schema_shortname>``.

    :param for_space: restrict to a single space.
    :param for_schemas: restrict to the listed schema shortnames.
    :param for_custom_indices: also run create_custom_indices afterwards.
    :param del_docs: delete indexed documents when dropping old indices.
    """
    spaces = await self.get_doc_by_id("spaces")
    for space_name in spaces:
        space_obj = core.Space.model_validate_json(spaces[space_name])
        if (
            for_space and for_space != space_name
        ) or not space_obj.indexing_enabled:
            continue

        # CREATE REDIS INDEX FOR THE META FILES INSIDE THE SPACE
        self.redis_indices[f"{space_name}"] = {}
        self.redis_indices[f"{space_name}"]["meta"] = self.ft(
            f"{space_name}:meta"
        )

        await self.create_index(
            f"{space_name}", "meta", self.META_SCHEMA, del_docs
        )

        # CREATE REDIS INDEX FOR EACH SCHEMA DEFINITION INSIDE THE SPACE
        # NOTE(review): the "." in r"(\w*).json" is unescaped, so it matches
        # any character -- harmless combined with the *.json glob, but worth
        # tightening to r"(\w*)\.json".
        schemas_file_pattern = re.compile(r"(\w*).json")
        schemas_glob = "*.json"
        path = (
            settings.spaces_folder
            / space_name
            / "schema"
        )

        for schema_path in path.glob(schemas_glob):
            # GET SCHEMA SHORTNAME
            match = schemas_file_pattern.search(str(schema_path))
            if not match or not schema_path.is_file():
                continue
            schema_shortname = match.group(1)

            if for_schemas and schema_shortname not in for_schemas:
                continue

            # "meta" is handled above; "meta_schema" describes meta itself.
            if schema_shortname in ["meta_schema", "meta"]:
                continue

            # GET SCHEMA PROPERTIES AND
            # GENERATE REDIS INDEX DEFINITION BY MAPPING SCHEMA PROPERTIES TO REDIS INDEX FIELDS
            schema_content = json.loads(schema_path.read_text())
            schema_content = resolve_schema_references(schema_content)
            # NOTE(review): this binds an alias to the class-level
            # META_SCHEMA list. append_unique_index_fields returns a copy,
            # so the rebind below is safe -- but if a schema has neither
            # "properties" nor "oneOf", the .append() further down would
            # mutate META_SCHEMA itself. Confirm whether that path occurs.
            redis_schema_definition : list[Field] = self.META_SCHEMA
            if "properties" in schema_content:
                for key, property in schema_content["properties"].items():
                    generated_schema_fields = self.get_redis_index_fields(
                        key, property, []
                    )
                    redis_schema_definition = self.append_unique_index_fields(
                        generated_schema_fields, redis_schema_definition
                    )

            elif "oneOf" in schema_content:
                for item in schema_content["oneOf"]:
                    for key, property in item["properties"].items():
                        generated_schema_fields = self.get_redis_index_fields(
                            key, property, []
                        )
                        redis_schema_definition = (
                            self.append_unique_index_fields(
                                generated_schema_fields, redis_schema_definition
                            )
                        )

            # Always truthy in practice (META_SCHEMA is non-empty).
            if redis_schema_definition:
                self.redis_indices[f"{space_name}"][
                    schema_shortname
                ] = self.ft(f"{space_name}:{schema_shortname}")
                # Every payload index needs a back-pointer to its meta doc.
                field_names = [f.as_name for f in redis_schema_definition]
                if "meta_doc_id" not in field_names:
                    redis_schema_definition.append(TextField("$.meta_doc_id", no_stem=True, as_name="meta_doc_id")) # type: ignore

                await self.create_index(
                    f"{space_name}",
                    schema_shortname,
                    redis_schema_definition,
                    del_docs,
                )

    if for_custom_indices:
        await self.create_custom_indices(for_space)
|
|
511
|
+
|
|
512
|
+
def append_unique_index_fields(self, new_index: list[Field], base_index: list[Field]) -> list[Field]:
    """Merge ``new_index`` into a copy of ``base_index``, skipping duplicates.

    Two fields count as the same when the third positional redis argument
    matches on both sides (for fields built with ``as_name`` this is the AS
    alias; the original compared the same slot on both fields despite its
    comments suggesting otherwise).  ``base_index`` itself is never mutated;
    a merged copy is returned.
    """
    merged = base_index.copy()
    # Track already-registered names once instead of rescanning the merged
    # list for every candidate: O(n + m) instead of O(n * m).
    seen_names = {base_field.redis_args()[2] for base_field in merged}
    for field in new_index:
        name = field.redis_args()[2]
        if name not in seen_names:
            merged.append(field)
            seen_names.add(name)
    return merged
|
|
527
|
+
|
|
528
|
+
def generate_doc_id(
    self,
    space_name: str,
    schema_shortname: str,
    shortname: str,
    subpath: str,
) -> str:
    """Build the canonical Redis document key for an entry.

    ``subpath`` is normalized by stripping leading/trailing slashes so that
    ``/a/b/``, ``a/b/`` and ``a/b`` all map to the same key.  (The previous
    commented-out manual slash trimming was dead code superseded by
    ``str.strip`` and has been removed.)
    """
    subpath = subpath.strip("/")
    return f"{space_name}:{schema_shortname}:{subpath}/{shortname}"
|
|
541
|
+
|
|
542
|
+
def prepare_meta_doc(
    self, space_name: str, subpath: str, meta: core.Meta
):
    """Serialize *meta* into a Redis-ready JSON dict and compute its doc id.

    Nothing is persisted here; returns ``(meta_doc_id, meta_json)`` where
    ``meta_json`` is the pydantic dump of *meta* enriched with the system
    attributes (query policies, view ACL, subpath, timestamps, ...).
    """
    resource_type = ResourceType(type(meta).__name__.lower())
    meta_doc_id = self.generate_doc_id(space_name, "meta", meta.shortname, subpath)

    # A payload doc id only exists when the entry carries a schema-backed payload.
    payload_doc_id = (
        self.generate_doc_id(
            space_name, meta.payload.schema_shortname, meta.shortname, subpath
        )
        if meta.payload and meta.payload.schema_shortname
        else None
    )

    meta.model_rebuild()
    meta_json = json.loads(
        meta.model_dump_json(
            serialize_as_any=False, exclude_none=True, warnings="error"
        )
    )
    meta_json["query_policies"] = generate_query_policies(
        space_name,
        subpath,
        resource_type,
        meta.is_active,
        meta.owner_shortname,
        meta.owner_group_shortname,
        meta.shortname,
    )
    meta_json["view_acl"] = self.generate_view_acl(meta_json.get("acl"))
    meta_json["subpath"] = subpath
    meta_json["resource_type"] = resource_type
    # Timestamps are stored as epoch floats so RediSearch can sort/filter them.
    meta_json["created_at"] = meta.created_at.timestamp()
    meta_json["updated_at"] = meta.updated_at.timestamp()
    meta_json["payload_doc_id"] = payload_doc_id

    return meta_doc_id, meta_json
|
|
576
|
+
|
|
577
|
+
def generate_view_acl(self, acl: list[dict[str, Any]] | None) -> list[str] | None:
    """Collect user shortnames granted view or query access by *acl* entries.

    Returns ``None`` when no ACL is supplied (or it is empty), preserving the
    distinction between "no explicit ACL" and "ACL grants nobody access".
    """
    if not acl:
        return None

    return [
        entry["user_shortname"]
        for entry in acl
        if ActionType.view in entry.get("allowed_actions", [])
        or ActionType.query in entry.get("allowed_actions", [])
    ]
|
|
588
|
+
|
|
589
|
+
async def save_meta_doc(
    self, space_name: str, subpath: str, meta: core.Meta
):
    """Serialize *meta* and persist it, returning ``(doc_id, doc_json)``."""
    doc_id, doc_json = self.prepare_meta_doc(space_name, subpath, meta)
    await self.save_doc(doc_id, doc_json)
    return doc_id, doc_json
|
|
597
|
+
|
|
598
|
+
def prepare_payload_doc(
    self,
    space_name: str,
    subpath: str,
    meta: core.Meta,
    payload: dict,
    resource_type: ResourceType = ResourceType.content,
):
    """Enrich *payload* with system attributes and compute its Redis doc id.

    Returns ``(doc_id, payload)``.  When the meta entry carries no usable
    payload body, returns ``("", {})`` so callers can skip persisting.

    Diagnostics previously went to stdout via ``print``; they now use the
    module ``logger`` like the rest of this class (messages unchanged).
    """
    # Guard clauses: a payload document requires meta.payload with a string
    # body (the payload file name).
    if meta.payload is None:
        logger.warning(
            f"Missing payload for {space_name}/{subpath} of type {resource_type}"
        )
        return "", {}
    if meta.payload.body is None:
        logger.warning(
            f"Missing body for {space_name}/{subpath} of type {resource_type}"
        )
        return "", {}
    if not isinstance(meta.payload.body, str):
        logger.warning("body should be type of string")
        return "", {}

    # The payload's shortname is the body file name without its extension.
    payload_shortname = meta.payload.body.split(".")[0]
    meta_doc_id = self.generate_doc_id(
        space_name, "meta", payload_shortname, subpath
    )
    docid = self.generate_doc_id(
        space_name,
        meta.payload.schema_shortname or "",
        payload_shortname,
        subpath,
    )

    payload["query_policies"] = generate_query_policies(
        space_name,
        subpath,
        resource_type,
        meta.is_active,
        meta.owner_shortname,
        meta.owner_group_shortname,
        meta.shortname,
    )
    if not payload["query_policies"]:
        # Entry would be invisible to every query; warn but still index it.
        logger.warning(
            f"Warning: this entry `{space_name}/{subpath}/{meta.shortname}` can't be accessed"
        )
    payload["subpath"] = subpath
    payload["resource_type"] = resource_type
    payload["shortname"] = payload_shortname
    payload["meta_doc_id"] = meta_doc_id

    return docid, payload
|
|
649
|
+
|
|
650
|
+
async def save_payload_doc(
    self,
    space_name: str,
    subpath: str,
    meta: core.Meta,
    payload: dict,
    resource_type: ResourceType = ResourceType.content,
):
    """Prepare the payload document and persist it.

    Silently returns without saving when preparation rejected the payload
    (signalled by an empty doc id).
    """
    docid, prepared = self.prepare_payload_doc(
        space_name, subpath, meta, payload, resource_type
    )
    if docid == "":
        return
    await self.save_doc(docid, prepared)
|
|
664
|
+
|
|
665
|
+
async def get_payload_doc(self, doc_id: str, resource_type: ResourceType):
    """Fetch a payload document and strip meta/system attributes from it.

    Attributes belonging to the resource's meta model or to Redis
    bookkeeping (``SYS_ATTRIBUTES``) are excluded; only the user payload
    remains.  Returns ``{}`` when the document does not exist.
    """
    resource_class = getattr(
        sys.modules["models.core"],
        camel_case(resource_type),
    )
    doc = await self.get_doc_by_id(doc_id)
    if not doc:
        return {}

    excluded = set(RedisServices.SYS_ATTRIBUTES) | set(resource_class.model_fields.keys())
    return {key: value for key, value in doc.items() if key not in excluded}
|
|
682
|
+
|
|
683
|
+
async def save_lock_doc(
    self,
    space_name: str,
    subpath: str,
    payload_shortname: str,
    owner_shortname: str,
    ttl: int,
) -> LockAction:
    """Acquire or extend the lock document for an entry.

    When no lock exists, create one with ``nx=True`` and return
    ``LockAction.lock``; when a lock already exists, refresh its TTL and
    return ``LockAction.extend``.  Raises ``api.Exception`` (HTTP 403,
    ``LOCKED_ENTRY``) when a competing lock is held by a different owner.

    NOTE(review): the extend path does not re-verify the lock's owner here —
    presumably callers gate on ``is_entry_locked`` first; confirm.
    """
    lock_doc_id = self.generate_doc_id(
        space_name, "lock", payload_shortname, subpath
    )
    lock_data = await self.get_lock_doc(
        space_name, subpath, payload_shortname
    )
    if not lock_data:
        payload = {
            "owner_shortname": owner_shortname,
            "lock_time": str(datetime.now().isoformat()),
        }
        # NX write: only succeeds when the lock key does not already exist.
        result = await self.save_doc(lock_doc_id, payload, nx=True)
        if result is None:
            # Key already present (e.g. created between the check above and
            # the NX write) — re-read and reject if held by someone else.
            lock_payload = await self.get_lock_doc(
                space_name, subpath, payload_shortname
            )
            if lock_payload["owner_shortname"] != owner_shortname:
                raise api.Exception(
                    status_code=status.HTTP_403_FORBIDDEN,
                    error=api.Error(
                        type="lock",
                        code=InternalErrorCode.LOCKED_ENTRY,
                        message=f"Entry is already locked by {lock_payload['owner_shortname']}",
                    ),
                )
        lock_type = LockAction.lock
    else:
        lock_type = LockAction.extend
    # Both fresh locks and extensions get a full TTL window from now.
    await self.set_ttl(lock_doc_id, ttl)
    return lock_type
|
|
721
|
+
|
|
722
|
+
async def get_lock_doc(
    self,
    space_name: str,
    subpath: str,
    payload_shortname: str,
):
    """Return the lock document for an entry, or ``{}`` when not locked."""
    doc_id = self.generate_doc_id(space_name, "lock", payload_shortname, subpath)
    return await self.get_doc_by_id(doc_id)
|
|
732
|
+
|
|
733
|
+
async def delete_lock_doc(
    self,
    space_name: str,
    subpath: str,
    payload_shortname: str,
):
    """Remove an entry's lock document (best-effort; errors are swallowed
    downstream by ``delete_doc``)."""
    await self.delete_doc(space_name, "lock", payload_shortname, subpath)
|
|
742
|
+
|
|
743
|
+
async def is_entry_locked(
    self,
    space_name: str,
    subpath: str,
    shortname: str,
    user_shortname: str,
):
    """Return True when the entry is locked by someone else.

    A lock held by *user_shortname* themself does not count as locked;
    absence of any lock document always yields False.
    """
    lock_payload = await self.get_lock_doc(space_name, subpath, shortname)
    if not lock_payload:
        return False
    if user_shortname:
        return lock_payload["owner_shortname"] != user_shortname
    return True
|
|
759
|
+
|
|
760
|
+
async def save_doc(
    self, doc_id: str, payload: dict, path: str = Path.root_path(), nx: bool = False
):
    """Store *payload* as a JSON document under *doc_id* (JSON.SET).

    Returns the underlying JSON.SET result: truthy on success, ``None``
    when ``nx=True`` and the key already exists.  The result was previously
    discarded, which made the ``result is None`` check in ``save_lock_doc``
    meaningless — it always saw ``None`` regardless of whether the lock was
    freshly created.
    """
    x = self.json().set(doc_id, path, payload, nx=nx)
    if x and isinstance(x, Awaitable):
        return await x
    return x
|
|
766
|
+
|
|
767
|
+
async def save_bulk(self, data: list, path: str = Path.root_path()):
    """Write many JSON documents in one pipelined round trip.

    Each item of *data* is a dict with ``doc_id`` and ``payload`` keys.
    Returns the pipeline's execution results.
    """
    pipe = self.pipeline()
    for document in data:
        pipe.json().set(document["doc_id"], path, document["payload"])
    return await pipe.execute()
|
|
772
|
+
|
|
773
|
+
async def get_count(self, space_name: str, schema_shortname: str):
    """Return the number of documents in the ``space:schema`` search index.

    Returns 0 when the index is missing or FT.INFO fails (error is logged).
    The old commented-out aggregate-based counting experiment was dead code
    and has been removed.
    """
    ft_index = self.ft(f"{space_name}:{schema_shortname}")
    try:
        info = await ft_index.info()
        return info["num_docs"]  # type: ignore
    except Exception as e:
        logger.error(f"Error at redis_services.get_count: {e}")
        return 0
|
|
786
|
+
|
|
787
|
+
async def search(
    self,
    space_name: str,
    search: str,
    filters: dict[str, str | list],
    limit: int,
    offset: int,
    exact_subpath: bool = False,
    sort_type: SortType = SortType.ascending,
    sort_by: str | None = None,
    highlight_fields: list[str] | None = None,
    schema_name: str = "meta",
    return_fields: list | None = None,
):
    """Run a RediSearch query against the ``space:schema`` index.

    Returns ``{"data": [...], "total": n}`` on success,
    ``{"data": [], "total": 0}`` when the index cannot be accessed, and
    ``{}`` on an unexpected reply shape or search failure (now logged
    instead of silently swallowed).

    ``return_fields`` previously used a mutable default (``list = []``);
    it now defaults to ``None`` with the same effective behavior.
    """
    if return_fields is None:
        return_fields = []

    # Verify the index exists before querying it.
    try:
        ft_index = self.ft(f"{space_name}:{schema_name}")
        await ft_index.info()
    except Exception as e:
        logger.error(
            f"Error accessing index: {space_name}:{schema_name}, at redis_services.search: {e}"
        )
        return {"data": [], "total": 0}

    search_query = Query(
        query_string=self.prepare_query_string(search, filters, exact_subpath)
    )

    if highlight_fields:
        search_query.highlight(highlight_fields, ["", ""])

    if sort_by:
        search_query.sort_by(sort_by, sort_type == SortType.ascending)

    if return_fields:
        search_query.return_fields(*return_fields)

    search_query.paging(offset, limit)

    try:
        search_res = await ft_index.search(query=search_query)  # type: ignore
        if (
            isinstance(search_res, dict)
            and "results" in search_res
            and "total_results" in search_res
        ):
            return {
                "data": [
                    one["extra_attributes"]["$"]
                    for one in search_res["results"]
                    if "extra_attributes" in one
                ],
                "total": search_res["total_results"],
            }
        return {}
    except Exception as e:
        logger.error(f"Error at redis_services.search: {e}")
        return {}
|
|
847
|
+
|
|
848
|
+
async def aggregate(
    self,
    space_name: str,
    search: str,
    filters: dict[str, str | list],
    group_by: list[str],
    reducers: list[RedisReducer],
    max: int = 10,
    exact_subpath: bool = False,
    sort_type: SortType = SortType.ascending,
    sort_by: str | None = None,
    schema_name: str = "meta",
    load: list | None = None,
) -> list:
    """Run an FT.AGGREGATE against the ``space:schema`` index.

    Returns the aggregation's ``results`` rows, or ``[]`` when the index is
    missing, the reply has an unexpected shape, or the call fails.

    Fixes: the sort direction was inverted relative to ``search`` —
    ``SortType.ascending`` produced ``Desc`` and vice versa; and ``load``
    used a mutable default argument.
    """
    if load is None:
        load = []

    # Verify the index exists before querying it.
    try:
        ft_index = self.ft(f"{space_name}:{schema_name}")
        await ft_index.info()
    except Exception:
        return []

    aggr_request = aggregation.AggregateRequest(
        self.prepare_query_string(search, filters, exact_subpath)
    )
    if group_by:
        reducers_functions = [
            RedisReducerName.mapper(reducer.reducer_name)(*reducer.args).alias(
                reducer.alias
            )
            for reducer in reducers
        ]
        aggr_request.group_by(group_by, *reducers_functions)

    if sort_by:
        # BUG FIX: ascending now maps to Asc (it previously produced Desc).
        direction = (
            aggregation.Asc(f"@{sort_by}")
            if sort_type == SortType.ascending
            else aggregation.Desc(f"@{sort_by}")
        )
        aggr_request.sort_by([str(direction)], max=max)  # type: ignore

    if load:
        aggr_request.load(*load)

    try:
        aggr_res = await ft_index.aggregate(aggr_request)  # type: ignore
        if aggr_res.get("results") and isinstance(aggr_res["results"], list):  # type: ignore
            return aggr_res["results"]  # type: ignore
    except Exception:
        pass
    return []
|
|
903
|
+
|
|
904
|
+
def prepare_query_string(
|
|
905
|
+
self, search: str, filters: dict[str, str | list], exact_subpath: bool
|
|
906
|
+
):
|
|
907
|
+
query_string = search
|
|
908
|
+
|
|
909
|
+
redis_escape_chars = str.maketrans(
|
|
910
|
+
{":": r"\:", "/": r"\/", "-": r"\-", " ": r"\ "}
|
|
911
|
+
)
|
|
912
|
+
if filters.get("query_policies", None) == []:
|
|
913
|
+
filters["query_policies"] = ["__NONE__"]
|
|
914
|
+
|
|
915
|
+
for item in filters.items():
|
|
916
|
+
if item[0] == "tags" and item[1]:
|
|
917
|
+
query_string += (
|
|
918
|
+
" @"
|
|
919
|
+
+ item[0]
|
|
920
|
+
+ ":{"
|
|
921
|
+
+ "|".join(item[1]).translate(redis_escape_chars)
|
|
922
|
+
+ "}"
|
|
923
|
+
)
|
|
924
|
+
elif item[0] == "query_policies" and item[1] is not None:
|
|
925
|
+
query_string += (
|
|
926
|
+
f" ((@{item[0]}:{{" + "|".join(item[1]).translate(redis_escape_chars) + "})"
|
|
927
|
+
)
|
|
928
|
+
if filters.get("user_shortname", None) is not None:
|
|
929
|
+
query_string += (
|
|
930
|
+
f" | (@view_acl:{{{filters['user_shortname']}}}) )"
|
|
931
|
+
)
|
|
932
|
+
else:
|
|
933
|
+
query_string += ")"
|
|
934
|
+
elif item[0] == "created_at" and item[1]:
|
|
935
|
+
query_string += f" @{item[0]}:{item[1]}"
|
|
936
|
+
elif item[0] == "subpath" and exact_subpath:
|
|
937
|
+
search_value = ""
|
|
938
|
+
for subpath in item[1]: # Handle existence/absence of `/`
|
|
939
|
+
search_value += "|" + subpath.strip("/")
|
|
940
|
+
search_value += "|" + f"/{subpath}".replace("//", "/")
|
|
941
|
+
|
|
942
|
+
exact_subpath_value = search_value.strip("|").translate(
|
|
943
|
+
redis_escape_chars
|
|
944
|
+
)
|
|
945
|
+
query_string += f" @exact_subpath:{{{exact_subpath_value}}}"
|
|
946
|
+
elif item[0] == "subpath" and item[1][0] == "/":
|
|
947
|
+
pass
|
|
948
|
+
elif item[1] and item[0] != "user_shortname":
|
|
949
|
+
query_string += " @" + item[0] + ":(" + "|".join(item[1]) + ")"
|
|
950
|
+
|
|
951
|
+
return query_string or "*"
|
|
952
|
+
|
|
953
|
+
async def get_doc_by_id(self, doc_id: str) -> Any:
    """Read the JSON document stored at *doc_id*.

    Returns the parsed dict, or ``{}`` when the key is missing, the stored
    value is not a JSON object, or any Redis error occurs (all failures are
    logged as warnings rather than raised).
    """
    try:
        x = self.json().get(name=doc_id)
        if not (x and isinstance(x, Awaitable)):
            raise Exception(f"Not awaitable {x=}")
        value = await x
        if isinstance(value, dict):
            return value
        if isinstance(value, str):
            return json.loads(value)
        raise Exception(f"Not json dict at id: {doc_id}. data: {value=}")
    except Exception as e:
        logger.warning(f"Error at redis_services.get_doc_by_id: {doc_id=} {e}")
    return {}
|
|
969
|
+
|
|
970
|
+
async def get_docs_by_ids(self, docs_ids: list[str]) -> list:
    """Bulk-fetch JSON documents via JSON.MGET.

    Returns the raw mget reply list; ``[]`` on any failure or when the
    client's reply is not the expected awaitable/list shape.
    """
    try:
        pending = self.json().mget(docs_ids, "$")
        if pending and isinstance(pending, Awaitable):
            fetched = await pending
            if isinstance(fetched, list):
                return fetched
    except Exception as e:
        logger.warning(f"Error at redis_services.get_docs_by_ids: {e}")
    return []
|
|
980
|
+
|
|
981
|
+
async def get_content_by_id(self, doc_id: str) -> Any:
    """Read the plain (non-JSON) value stored at *doc_id*.

    Falls back to an empty string when the GET fails for any reason
    (failure is logged as a warning).
    """
    try:
        content = await self.get(doc_id)
    except Exception as e:
        logger.warning(f"Error at redis_services.get_content_by_id: {e}")
        return ""
    return content
|
|
987
|
+
|
|
988
|
+
async def delete_doc(
    self, space_name: str, schema_shortname: str, shortname: str, subpath: str
):
    """Delete the JSON document addressed by the four key components.

    Best-effort: any Redis failure is logged as a warning and swallowed.
    (Type annotations added for consistency with the other methods here.)
    """
    docid = self.generate_doc_id(
        space_name, schema_shortname, shortname, subpath
    )
    try:
        x = self.json().delete(key=docid)
        if x and isinstance(x, Awaitable):
            await x
    except Exception as e:
        logger.warning(f"Error at redis_services.delete_doc: {e}")
|
|
1000
|
+
|
|
1001
|
+
async def move_payload_doc(
    self,
    space_name: str,
    schema_shortname: str,
    src_shortname: str,
    src_subpath: str,
    dest_shortname: str,
    dest_subpath: str,
):
    """Move a payload document to a new shortname/subpath.

    Implemented as read -> delete -> write under the new id; NOTE this is
    not atomic, and any failure is logged and swallowed (best-effort).
    (Type annotations added for consistency with the other methods here.)
    """
    docid = self.generate_doc_id(
        space_name, schema_shortname, src_shortname, src_subpath
    )

    try:
        doc_content = await self.get_doc_by_id(docid)
        await self.delete_doc(
            space_name, schema_shortname, src_shortname, src_subpath
        )

        new_docid = self.generate_doc_id(
            space_name, schema_shortname, dest_shortname, dest_subpath
        )
        await self.save_doc(new_docid, doc_content)

    except Exception as e:
        logger.warning(f"Error at redis_services.move_payload_doc: {e}")
|
|
1027
|
+
|
|
1028
|
+
async def move_meta_doc(
    self,
    space_name: str,
    src_shortname: str,
    src_subpath: str,
    dest_subpath: str,
    meta: core.Meta,
):
    """Move a meta document: delete the old key, then re-save *meta* under
    *dest_subpath*.

    Not atomic; any failure is logged and swallowed (best-effort).
    (Type annotations added for consistency with the other methods here.)
    """
    try:
        await self.delete_doc(
            space_name, "meta", src_shortname, src_subpath
        )
        await self.save_meta_doc(space_name, dest_subpath, meta)
    except Exception as e:
        logger.warning(f"Error at redis_services.move_meta_doc: {e}")
|
|
1038
|
+
|
|
1039
|
+
async def get_keys(self, pattern: str = "*") -> list:
    """Return all keys matching *pattern* (KEYS command).

    Returns ``[]`` on error or when the reply is not a list.
    """
    try:
        found = await self.keys(pattern)
        if isinstance(found, list):
            return found
    except Exception as e:
        logger.warning(f"Error at redis_services.get_keys: {e}")
    return []
|
|
1047
|
+
|
|
1048
|
+
async def del_keys(self, keys: list):
    """Delete *keys*; returns the deleted count, or ``False`` on failure.

    Fix: an empty list used to be splatted into ``DEL`` with no arguments,
    which raises a Redis arity error (swallowed into ``False``); it is now
    a no-op returning 0.
    """
    if not keys:
        return 0
    try:
        return await self.delete(*keys)
    except Exception as e:
        logger.warning(f"Error at redis_services.del_keys {keys}: {e}")
        return False
|
|
1054
|
+
|
|
1055
|
+
async def get_key(self, key) -> str | None:
    """Return the string stored at *key*; ``None`` for missing or
    non-string values."""
    value = await self.get(key)
    return value if isinstance(value, str) else None
|
|
1061
|
+
|
|
1062
|
+
async def getdel_key(self, key) -> str | None:
    """Atomically read and delete *key* (GETDEL); ``None`` for missing or
    non-string values."""
    value = await self.getdel(key)
    return value if isinstance(value, str) else None
|
|
1068
|
+
|
|
1069
|
+
async def set_key(self, key, value, ex=None, nx: bool = False):
    """Thin wrapper over SET with optional expiry (*ex*, seconds) and
    set-if-not-exists (*nx*) semantics."""
    return await self.set(key, value, ex=ex, nx=nx)
|
|
1071
|
+
|
|
1072
|
+
async def set_ttl(self, key: str, ttl: int):
    """Make *key* expire after *ttl* seconds (EXPIRE)."""
    return await self.expire(key, ttl)
|
|
1074
|
+
|
|
1075
|
+
async def drop_index(self, name: str, delete_docs: bool = False):
    """Drop the search index *name*, optionally deleting its documents.

    Returns True on success and False when the index is missing or the
    drop call fails for any other reason.
    """
    try:
        await self.ft(name).dropindex(delete_docs)
    except Exception:
        return False
    return True
|
|
1082
|
+
|
|
1083
|
+
async def list_indices(self):
    """List all RediSearch index names via FT._LIST.

    Returns ``None`` (implicitly) when the client reply is not awaitable —
    a defensive guard kept from the original.
    """
    reply = self.ft().execute_command("FT._LIST")
    if reply and isinstance(reply, Awaitable):
        return await reply
|
|
1087
|
+
|
|
1088
|
+
|
|
1089
|
+
|
|
1090
|
+
async def get_all_document_ids(self, index: str, search_str: str = "*") -> list[str]:
    """Collect every document id in *index* matching *search_str*.

    Reads FT.INFO to learn the document count, then pages through the index
    in fixed-size batches, stopping early if a page has an unexpected shape.
    """
    ft_index = self.ft(index)
    total_docs = int((await ft_index.info())['num_docs'])  # type: ignore

    batch_size = 10000  # page size per FT.SEARCH round trip; tune as needed

    document_ids: list[str] = []
    for offset in range(0, total_docs, batch_size):
        page = ft_index.search(Query(search_str).paging(offset, batch_size))  # type: ignore
        if page and isinstance(page, Awaitable):
            page = await page

        if 'results' not in page or not isinstance(page['results'], list):  # type: ignore
            break
        document_ids.extend(doc['id'] for doc in page['results'])  # type: ignore

    return document_ids
|