dmart 1.4.40.post8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dmart/__init__.py +7 -0
- dmart/alembic/README +1 -0
- dmart/alembic/__init__.py +0 -0
- dmart/alembic/env.py +91 -0
- dmart/alembic/notes.txt +11 -0
- dmart/alembic/script.py.mako +28 -0
- dmart/alembic/scripts/__init__.py +0 -0
- dmart/alembic/scripts/calculate_checksums.py +77 -0
- dmart/alembic/scripts/migration_f7a4949eed19.py +28 -0
- dmart/alembic/versions/0f3d2b1a7c21_add_authz_materialized_views.py +87 -0
- dmart/alembic/versions/10d2041b94d4_last_checksum_history.py +62 -0
- dmart/alembic/versions/1cf4e1ee3cb8_ext_permission_with_filter_fields_values.py +33 -0
- dmart/alembic/versions/26bfe19b49d4_rm_failedloginattempts.py +42 -0
- dmart/alembic/versions/3c8bca2219cc_add_otp_table.py +38 -0
- dmart/alembic/versions/6675fd9dfe42_remove_unique_from_sessions_table.py +36 -0
- dmart/alembic/versions/71bc1df82e6a_adding_user_last_login_at.py +43 -0
- dmart/alembic/versions/74288ccbd3b5_initial.py +264 -0
- dmart/alembic/versions/7520a89a8467_rm_activesession_table.py +39 -0
- dmart/alembic/versions/848b623755a4_make_created_nd_updated_at_required.py +138 -0
- dmart/alembic/versions/8640dcbebf85_add_notes_to_users.py +32 -0
- dmart/alembic/versions/91c94250232a_adding_fk_on_owner_shortname.py +104 -0
- dmart/alembic/versions/98ecd6f56f9a_ext_meta_with_owner_group_shortname.py +66 -0
- dmart/alembic/versions/9aae9138c4ef_indexing_created_at_updated_at.py +80 -0
- dmart/alembic/versions/__init__.py +0 -0
- dmart/alembic/versions/b53f916b3f6d_json_to_jsonb.py +492 -0
- dmart/alembic/versions/eb5f1ec65156_adding_user_locked_to_device.py +36 -0
- dmart/alembic/versions/f7a4949eed19_adding_query_policies_to_meta.py +60 -0
- dmart/alembic.ini +117 -0
- dmart/api/__init__.py +0 -0
- dmart/api/info/__init__.py +0 -0
- dmart/api/info/router.py +109 -0
- dmart/api/managed/__init__.py +0 -0
- dmart/api/managed/router.py +1541 -0
- dmart/api/managed/utils.py +1879 -0
- dmart/api/public/__init__.py +0 -0
- dmart/api/public/router.py +758 -0
- dmart/api/qr/__init__.py +0 -0
- dmart/api/qr/router.py +108 -0
- dmart/api/user/__init__.py +0 -0
- dmart/api/user/model/__init__.py +0 -0
- dmart/api/user/model/errors.py +14 -0
- dmart/api/user/model/requests.py +165 -0
- dmart/api/user/model/responses.py +11 -0
- dmart/api/user/router.py +1413 -0
- dmart/api/user/service.py +270 -0
- dmart/bundler.py +52 -0
- dmart/cli.py +1133 -0
- dmart/config/__init__.py +0 -0
- dmart/config/channels.json +11 -0
- dmart/config/notification.json +17 -0
- dmart/config.env.sample +27 -0
- dmart/config.ini.sample +7 -0
- dmart/conftest.py +13 -0
- dmart/curl.sh +196 -0
- dmart/cxb/__init__.py +0 -0
- dmart/cxb/assets/@codemirror-Rn7_6DkE.js +10 -0
- dmart/cxb/assets/@edraj-CS4NwVbD.js +1 -0
- dmart/cxb/assets/@floating-ui-BwwcF-xh.js +1 -0
- dmart/cxb/assets/@formatjs-yKEsAtjs.js +1 -0
- dmart/cxb/assets/@fortawesome-DRW1UCdr.js +9 -0
- dmart/cxb/assets/@jsonquerylang-laKNoFFq.js +12 -0
- dmart/cxb/assets/@lezer-za4Q-8Ew.js +1 -0
- dmart/cxb/assets/@marijn-DXwl3gUT.js +1 -0
- dmart/cxb/assets/@popperjs-l0sNRNKZ.js +1 -0
- dmart/cxb/assets/@replit--ERk53eB.js +1 -0
- dmart/cxb/assets/@roxi-CGMFK4i8.js +6 -0
- dmart/cxb/assets/@typewriter-cCzskkIv.js +17 -0
- dmart/cxb/assets/@zerodevx-BlBZjKxu.js +1 -0
- dmart/cxb/assets/@zerodevx-CVEpe6WZ.css +1 -0
- dmart/cxb/assets/BreadCrumbLite-DAhOx38v.js +1 -0
- dmart/cxb/assets/EntryRenderer-CCqV8Rkg.js +32 -0
- dmart/cxb/assets/EntryRenderer-DXytdFp9.css +1 -0
- dmart/cxb/assets/ListView-BQelo7vZ.js +16 -0
- dmart/cxb/assets/ListView-U8of-_c-.css +1 -0
- dmart/cxb/assets/Prism--hMplq-p.js +3 -0
- dmart/cxb/assets/Prism-Uh6uStUw.css +1 -0
- dmart/cxb/assets/Table2Cols-BsbwicQm.js +1 -0
- dmart/cxb/assets/_..-BvT6vdHa.css +1 -0
- dmart/cxb/assets/_...404_-fuLH_rX9.js +2 -0
- dmart/cxb/assets/_...fallback_-Ba_NLmAE.js +1 -0
- dmart/cxb/assets/_module-3HrtKAWo.js +3 -0
- dmart/cxb/assets/_module-DFKFq0AM.js +4 -0
- dmart/cxb/assets/_module-Dgq0ZVtz.js +1 -0
- dmart/cxb/assets/ajv-Cpj98o6Y.js +1 -0
- dmart/cxb/assets/axios-CG2WSiiR.js +6 -0
- dmart/cxb/assets/clsx-B-dksMZM.js +1 -0
- dmart/cxb/assets/codemirror-wrapped-line-indent-DPhKvljI.js +1 -0
- dmart/cxb/assets/compare-C3AjiGFR.js +1 -0
- dmart/cxb/assets/compute-scroll-into-view-Bl8rNFhg.js +1 -0
- dmart/cxb/assets/consolite-DlCuI0F9.js +1 -0
- dmart/cxb/assets/crelt-C8TCjufn.js +1 -0
- dmart/cxb/assets/date-fns-l0sNRNKZ.js +1 -0
- dmart/cxb/assets/deepmerge-rn4rBaHU.js +1 -0
- dmart/cxb/assets/dmart_services-AL6-IdDE.js +1 -0
- dmart/cxb/assets/downloadFile-D08i0YDh.js +1 -0
- dmart/cxb/assets/easy-signal-BiPFIK3O.js +1 -0
- dmart/cxb/assets/esm-env-rsSWfq8L.js +1 -0
- dmart/cxb/assets/export-OF_rTiXu.js +1 -0
- dmart/cxb/assets/fast-deep-equal-l0sNRNKZ.js +1 -0
- dmart/cxb/assets/fast-diff-C-IidNf4.js +1 -0
- dmart/cxb/assets/fast-uri-l0sNRNKZ.js +1 -0
- dmart/cxb/assets/flowbite-svelte-BLvjb-sa.js +1 -0
- dmart/cxb/assets/flowbite-svelte-CD54FDqW.css +1 -0
- dmart/cxb/assets/flowbite-svelte-icons-BI8GVhw_.js +1 -0
- dmart/cxb/assets/github-slugger-CQ4oX9Ud.js +1 -0
- dmart/cxb/assets/global-igKv-1g9.js +1 -0
- dmart/cxb/assets/hookar-BMRD9G9H.js +1 -0
- dmart/cxb/assets/immutable-json-patch-DtRO2E_S.js +1 -0
- dmart/cxb/assets/import-1vE3gBat.js +1 -0
- dmart/cxb/assets/index-B-eTh-ZX.js +1 -0
- dmart/cxb/assets/index-BSsK-X71.js +1 -0
- dmart/cxb/assets/index-BVyxzKtH.js +1 -0
- dmart/cxb/assets/index-BdeNM69f.js +1 -0
- dmart/cxb/assets/index-CC-A1ipE.js +1 -0
- dmart/cxb/assets/index-CQohGiYB.js +1 -0
- dmart/cxb/assets/index-ChjnkpdZ.js +4 -0
- dmart/cxb/assets/index-DLP7csA4.js +1 -0
- dmart/cxb/assets/index-DTfhnhwd.js +1 -0
- dmart/cxb/assets/index-DdXRK7n9.js +2 -0
- dmart/cxb/assets/index-DtiCmB4o.js +1 -0
- dmart/cxb/assets/index-NBrXBlLA.css +2 -0
- dmart/cxb/assets/index-X1uNehO7.js +1 -0
- dmart/cxb/assets/index-nrQW6Nrr.js +1 -0
- dmart/cxb/assets/info-B986lRiM.js +1 -0
- dmart/cxb/assets/intl-messageformat-Dc5UU-HB.js +3 -0
- dmart/cxb/assets/jmespath-l0sNRNKZ.js +1 -0
- dmart/cxb/assets/json-schema-traverse-l0sNRNKZ.js +1 -0
- dmart/cxb/assets/json-source-map-DRgZidqy.js +5 -0
- dmart/cxb/assets/jsonpath-plus-l0sNRNKZ.js +1 -0
- dmart/cxb/assets/jsonrepair-B30Dx381.js +8 -0
- dmart/cxb/assets/lodash-es-DZVAA2ox.js +1 -0
- dmart/cxb/assets/marked-DKjyhwJX.js +56 -0
- dmart/cxb/assets/marked-gfm-heading-id-U5zO829x.js +2 -0
- dmart/cxb/assets/marked-mangle-CDMeiHC6.js +1 -0
- dmart/cxb/assets/memoize-one-BdPwpGay.js +1 -0
- dmart/cxb/assets/natural-compare-lite-Bg2Xcf-o.js +7 -0
- dmart/cxb/assets/pagination-svelte-D5CyoiE_.js +13 -0
- dmart/cxb/assets/pagination-svelte-v10nAbbM.css +1 -0
- dmart/cxb/assets/plantuml-encoder-C47mzt9T.js +1 -0
- dmart/cxb/assets/prismjs-DTUiLGJu.js +9 -0
- dmart/cxb/assets/profile-BUf-tKMe.js +1 -0
- dmart/cxb/assets/query-CNmXTsgf.js +1 -0
- dmart/cxb/assets/queryHelpers-C9iBWwqe.js +1 -0
- dmart/cxb/assets/scroll-into-view-if-needed-KR58zyjF.js +1 -0
- dmart/cxb/assets/spaces-0oyGvpii.js +1 -0
- dmart/cxb/assets/style-mod-Bs6eFhZE.js +3 -0
- dmart/cxb/assets/svelte-B2XmcTi_.js +4 -0
- dmart/cxb/assets/svelte-awesome-COLlx0DN.css +1 -0
- dmart/cxb/assets/svelte-awesome-DhnMA6Q_.js +1 -0
- dmart/cxb/assets/svelte-datatables-net-CY7LBj6I.js +1 -0
- dmart/cxb/assets/svelte-floating-ui-BlS3sOAQ.js +1 -0
- dmart/cxb/assets/svelte-i18n-CT2KkQaN.js +3 -0
- dmart/cxb/assets/svelte-jsoneditor-BzfX6Usi.css +1 -0
- dmart/cxb/assets/svelte-jsoneditor-CUGSvWId.js +25 -0
- dmart/cxb/assets/svelte-select-CegQKzqH.css +1 -0
- dmart/cxb/assets/svelte-select-CjHAt_85.js +6 -0
- dmart/cxb/assets/tailwind-merge-CJvxXMcu.js +1 -0
- dmart/cxb/assets/tailwind-variants-Cj20BoQ3.js +1 -0
- dmart/cxb/assets/toast-B9WDyfyI.js +1 -0
- dmart/cxb/assets/tslib-pJfR_DrR.js +1 -0
- dmart/cxb/assets/typewriter-editor-DkTVIJdm.js +25 -0
- dmart/cxb/assets/user-DeK_NB5v.js +1 -0
- dmart/cxb/assets/vanilla-picker-l5rcX3cq.js +8 -0
- dmart/cxb/assets/w3c-keyname-Vcq4gwWv.js +1 -0
- dmart/cxb/config.json +11 -0
- dmart/cxb/config.sample.json +11 -0
- dmart/cxb/favicon.ico +0 -0
- dmart/cxb/favicon.png +0 -0
- dmart/cxb/index.html +28 -0
- dmart/data_adapters/__init__.py +0 -0
- dmart/data_adapters/adapter.py +16 -0
- dmart/data_adapters/base_data_adapter.py +467 -0
- dmart/data_adapters/file/__init__.py +0 -0
- dmart/data_adapters/file/adapter.py +2043 -0
- dmart/data_adapters/file/adapter_helpers.py +1013 -0
- dmart/data_adapters/file/archive.py +150 -0
- dmart/data_adapters/file/create_index.py +331 -0
- dmart/data_adapters/file/create_users_folders.py +52 -0
- dmart/data_adapters/file/custom_validations.py +68 -0
- dmart/data_adapters/file/drop_index.py +40 -0
- dmart/data_adapters/file/health_check.py +560 -0
- dmart/data_adapters/file/redis_services.py +1110 -0
- dmart/data_adapters/helpers.py +27 -0
- dmart/data_adapters/sql/__init__.py +0 -0
- dmart/data_adapters/sql/adapter.py +3218 -0
- dmart/data_adapters/sql/adapter_helpers.py +491 -0
- dmart/data_adapters/sql/create_tables.py +451 -0
- dmart/data_adapters/sql/create_users_folders.py +53 -0
- dmart/data_adapters/sql/db_to_json_migration.py +485 -0
- dmart/data_adapters/sql/health_check_sql.py +232 -0
- dmart/data_adapters/sql/json_to_db_migration.py +454 -0
- dmart/data_adapters/sql/update_query_policies.py +101 -0
- dmart/data_generator.py +81 -0
- dmart/dmart.py +761 -0
- dmart/get_settings.py +7 -0
- dmart/hypercorn_config.toml +3 -0
- dmart/info.json +1 -0
- dmart/languages/__init__.py +0 -0
- dmart/languages/arabic.json +15 -0
- dmart/languages/english.json +16 -0
- dmart/languages/kurdish.json +14 -0
- dmart/languages/loader.py +12 -0
- dmart/login_creds.sh +7 -0
- dmart/login_creds.sh.sample +7 -0
- dmart/main.py +563 -0
- dmart/manifest.sh +12 -0
- dmart/migrate.py +24 -0
- dmart/models/__init__.py +0 -0
- dmart/models/api.py +203 -0
- dmart/models/core.py +597 -0
- dmart/models/enums.py +255 -0
- dmart/password_gen.py +8 -0
- dmart/plugins/__init__.py +0 -0
- dmart/plugins/action_log/__init__.py +0 -0
- dmart/plugins/action_log/config.json +13 -0
- dmart/plugins/action_log/plugin.py +121 -0
- dmart/plugins/admin_notification_sender/__init__.py +0 -0
- dmart/plugins/admin_notification_sender/config.json +13 -0
- dmart/plugins/admin_notification_sender/plugin.py +124 -0
- dmart/plugins/ldap_manager/__init__.py +0 -0
- dmart/plugins/ldap_manager/config.json +12 -0
- dmart/plugins/ldap_manager/dmart.schema +146 -0
- dmart/plugins/ldap_manager/plugin.py +100 -0
- dmart/plugins/ldap_manager/slapd.conf +53 -0
- dmart/plugins/local_notification/__init__.py +0 -0
- dmart/plugins/local_notification/config.json +13 -0
- dmart/plugins/local_notification/plugin.py +123 -0
- dmart/plugins/realtime_updates_notifier/__init__.py +0 -0
- dmart/plugins/realtime_updates_notifier/config.json +12 -0
- dmart/plugins/realtime_updates_notifier/plugin.py +58 -0
- dmart/plugins/redis_db_update/__init__.py +0 -0
- dmart/plugins/redis_db_update/config.json +13 -0
- dmart/plugins/redis_db_update/plugin.py +188 -0
- dmart/plugins/resource_folders_creation/__init__.py +0 -0
- dmart/plugins/resource_folders_creation/config.json +12 -0
- dmart/plugins/resource_folders_creation/plugin.py +81 -0
- dmart/plugins/system_notification_sender/__init__.py +0 -0
- dmart/plugins/system_notification_sender/config.json +13 -0
- dmart/plugins/system_notification_sender/plugin.py +188 -0
- dmart/plugins/update_access_controls/__init__.py +0 -0
- dmart/plugins/update_access_controls/config.json +12 -0
- dmart/plugins/update_access_controls/plugin.py +9 -0
- dmart/publish.sh +57 -0
- dmart/pylint.sh +16 -0
- dmart/pyrightconfig.json +7 -0
- dmart/redis_connections.sh +13 -0
- dmart/reload.sh +56 -0
- dmart/run.sh +3 -0
- dmart/run_notification_campaign.py +85 -0
- dmart/sample/spaces/applications/.dm/meta.space.json +30 -0
- dmart/sample/spaces/applications/api/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/api/.dm/query_all_applications/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/.dm/test_by_saad/attachments.media/meta.warframe.json +1 -0
- dmart/sample/spaces/applications/api/.dm/test_by_saad/attachments.media/warframe.png +0 -0
- dmart/sample/spaces/applications/api/.dm/test_by_saad/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/.dm/user_profile/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/create_log/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/create_public_logs/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/query_all_translated_data/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/query_logs/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/query_translated_enums/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/query_translated_others/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/query_translated_resolution/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/create_log.json +1 -0
- dmart/sample/spaces/applications/api/applications/create_public_logs.json +1 -0
- dmart/sample/spaces/applications/api/applications/query_all_translated_data.json +1 -0
- dmart/sample/spaces/applications/api/applications/query_logs.json +1 -0
- dmart/sample/spaces/applications/api/applications/query_translated_enums.json +1 -0
- dmart/sample/spaces/applications/api/applications/query_translated_others.json +1 -0
- dmart/sample/spaces/applications/api/applications/query_translated_resolution.json +1 -0
- dmart/sample/spaces/applications/api/applications.json +1 -0
- dmart/sample/spaces/applications/api/management/.dm/create_subaccount/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/management/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/api/management/.dm/update_password/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/management/create_subaccount.json +53 -0
- dmart/sample/spaces/applications/api/management/update_password.json +1 -0
- dmart/sample/spaces/applications/api/management.json +1 -0
- dmart/sample/spaces/applications/api/query_all_applications.json +15 -0
- dmart/sample/spaces/applications/api/test_by_saad.json +1 -0
- dmart/sample/spaces/applications/api/user/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/api/user/.dm/test_by_saad/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/user/.dm/user_profile/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/user/test_by_saad.json +1 -0
- dmart/sample/spaces/applications/api/user/user_profile.json +1 -0
- dmart/sample/spaces/applications/api/user_profile.json +1 -0
- dmart/sample/spaces/applications/api.json +1 -0
- dmart/sample/spaces/applications/collections/.dm/meta.folder.json +19 -0
- dmart/sample/spaces/applications/collections.json +1 -0
- dmart/sample/spaces/applications/configurations/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/configurations/time_out.json +1 -0
- dmart/sample/spaces/applications/configurations.json +19 -0
- dmart/sample/spaces/applications/errors.json +1 -0
- dmart/sample/spaces/applications/logs/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/logs.json +1 -0
- dmart/sample/spaces/applications/queries/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/queries/.dm/order/meta.content.json +1 -0
- dmart/sample/spaces/applications/queries/order.json +1 -0
- dmart/sample/spaces/applications/queries.json +1 -0
- dmart/sample/spaces/applications/schema/.dm/api/meta.schema.json +1 -0
- dmart/sample/spaces/applications/schema/.dm/configuration/meta.schema.json +1 -0
- dmart/sample/spaces/applications/schema/.dm/error/meta.schema.json +1 -0
- dmart/sample/spaces/applications/schema/.dm/log/meta.schema.json +1 -0
- dmart/sample/spaces/applications/schema/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/schema/.dm/query/meta.schema.json +16 -0
- dmart/sample/spaces/applications/schema/.dm/translation/meta.schema.json +1 -0
- dmart/sample/spaces/applications/schema/api.json +28 -0
- dmart/sample/spaces/applications/schema/configuration.json +1 -0
- dmart/sample/spaces/applications/schema/error.json +43 -0
- dmart/sample/spaces/applications/schema/log.json +1 -0
- dmart/sample/spaces/applications/schema/query.json +118 -0
- dmart/sample/spaces/applications/schema/translation.json +26 -0
- dmart/sample/spaces/applications/schema.json +1 -0
- dmart/sample/spaces/applications/translations/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/translations.json +1 -0
- dmart/sample/spaces/archive/.dm/meta.space.json +27 -0
- dmart/sample/spaces/custom_plugins/dummy/__pycache__/plugin.cpython-314.pyc +0 -0
- dmart/sample/spaces/custom_plugins/dummy/config.json +28 -0
- dmart/sample/spaces/custom_plugins/dummy/plugin.py +6 -0
- dmart/sample/spaces/custom_plugins/missed_entry/config.json +12 -0
- dmart/sample/spaces/custom_plugins/missed_entry/plugin.py +119 -0
- dmart/sample/spaces/custom_plugins/own_changed_notification/__pycache__/plugin.cpython-314.pyc +0 -0
- dmart/sample/spaces/custom_plugins/own_changed_notification/config.json +12 -0
- dmart/sample/spaces/custom_plugins/own_changed_notification/plugin.py +65 -0
- dmart/sample/spaces/custom_plugins/reports_stats/config.json +14 -0
- dmart/sample/spaces/custom_plugins/reports_stats/plugin.py +82 -0
- dmart/sample/spaces/custom_plugins/system_notification_sender/config.json +22 -0
- dmart/sample/spaces/custom_plugins/system_notification_sender/notification.py +268 -0
- dmart/sample/spaces/custom_plugins/system_notification_sender/plugin.py +98 -0
- dmart/sample/spaces/management/.dm/events.jsonl +32 -0
- dmart/sample/spaces/management/.dm/meta.space.json +48 -0
- dmart/sample/spaces/management/.dm/notifications/attachments.view.json/admin.json +36 -0
- dmart/sample/spaces/management/.dm/notifications/attachments.view.json/meta.admin.json +1 -0
- dmart/sample/spaces/management/.dm/notifications/attachments.view.json/meta.system.json +1 -0
- dmart/sample/spaces/management/.dm/notifications/attachments.view.json/system.json +32 -0
- dmart/sample/spaces/management/collections/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/collections.json +1 -0
- dmart/sample/spaces/management/groups/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/groups.json +1 -0
- dmart/sample/spaces/management/health_check/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/health_check.json +1 -0
- dmart/sample/spaces/management/notifications/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/notifications/admin/.dm/meta.folder.json +9 -0
- dmart/sample/spaces/management/notifications/system/.dm/meta.folder.json +9 -0
- dmart/sample/spaces/management/notifications.json +1 -0
- dmart/sample/spaces/management/permissions/.dm/access_applications/meta.permission.json +31 -0
- dmart/sample/spaces/management/permissions/.dm/access_applications_world/meta.permission.json +31 -0
- dmart/sample/spaces/management/permissions/.dm/access_messages/meta.permission.json +23 -0
- dmart/sample/spaces/management/permissions/.dm/access_personal/meta.permission.json +40 -0
- dmart/sample/spaces/management/permissions/.dm/access_protected/meta.permission.json +33 -0
- dmart/sample/spaces/management/permissions/.dm/access_public/meta.permission.json +24 -0
- dmart/sample/spaces/management/permissions/.dm/browse_all_folders/meta.permission.json +23 -0
- dmart/sample/spaces/management/permissions/.dm/create_log/meta.permission.json +24 -0
- dmart/sample/spaces/management/permissions/.dm/interviewer/meta.permission.json +1 -0
- dmart/sample/spaces/management/permissions/.dm/manage_applications/meta.permission.json +1 -0
- dmart/sample/spaces/management/permissions/.dm/manage_debug/meta.permission.json +25 -0
- dmart/sample/spaces/management/permissions/.dm/manage_spaces/meta.permission.json +24 -0
- dmart/sample/spaces/management/permissions/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/permissions/.dm/rules_management_default/meta.permission.json +32 -0
- dmart/sample/spaces/management/permissions/.dm/super_manager/meta.permission.json +52 -0
- dmart/sample/spaces/management/permissions/.dm/view_activity_log/meta.permission.json +26 -0
- dmart/sample/spaces/management/permissions/.dm/view_collections/meta.permission.json +29 -0
- dmart/sample/spaces/management/permissions/.dm/view_logs/meta.permission.json +30 -0
- dmart/sample/spaces/management/permissions/.dm/view_roles/meta.permission.json +29 -0
- dmart/sample/spaces/management/permissions/.dm/view_users/meta.permission.json +25 -0
- dmart/sample/spaces/management/permissions/.dm/view_world/meta.permission.json +31 -0
- dmart/sample/spaces/management/permissions/.dm/world/meta.permission.json +35 -0
- dmart/sample/spaces/management/permissions.json +1 -0
- dmart/sample/spaces/management/requests.json +1 -0
- dmart/sample/spaces/management/roles/.dm/dummy/meta.role.json +12 -0
- dmart/sample/spaces/management/roles/.dm/logged_in/meta.role.json +18 -0
- dmart/sample/spaces/management/roles/.dm/manager/meta.role.json +13 -0
- dmart/sample/spaces/management/roles/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/roles/.dm/moderator/meta.role.json +13 -0
- dmart/sample/spaces/management/roles/.dm/super_admin/meta.role.json +14 -0
- dmart/sample/spaces/management/roles/.dm/test_role/meta.role.json +13 -0
- dmart/sample/spaces/management/roles/.dm/world/meta.role.json +15 -0
- dmart/sample/spaces/management/roles.json +1 -0
- dmart/sample/spaces/management/schema/.dm/admin_notification_request/attachments.media/meta.ui_schema.json +10 -0
- dmart/sample/spaces/management/schema/.dm/admin_notification_request/attachments.media/ui_schema.json +32 -0
- dmart/sample/spaces/management/schema/.dm/admin_notification_request/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/.dm/api/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/.dm/folder_rendering/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/.dm/health_check/meta.schema.json +17 -0
- dmart/sample/spaces/management/schema/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/schema/.dm/meta_schema/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/.dm/metafile/meta.schema.json +14 -0
- dmart/sample/spaces/management/schema/.dm/notification/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/.dm/system_notification_request/attachments.media/meta.ui_schema.json +10 -0
- dmart/sample/spaces/management/schema/.dm/system_notification_request/attachments.media/ui_schema.json +32 -0
- dmart/sample/spaces/management/schema/.dm/system_notification_request/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/.dm/view/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/.dm/workflow/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/admin_notification_request.json +89 -0
- dmart/sample/spaces/management/schema/api.json +1 -0
- dmart/sample/spaces/management/schema/folder_rendering.json +238 -0
- dmart/sample/spaces/management/schema/health_check.json +8 -0
- dmart/sample/spaces/management/schema/meta_schema.json +74 -0
- dmart/sample/spaces/management/schema/metafile.json +153 -0
- dmart/sample/spaces/management/schema/notification.json +28 -0
- dmart/sample/spaces/management/schema/system_notification_request.json +57 -0
- dmart/sample/spaces/management/schema/view.json +23 -0
- dmart/sample/spaces/management/schema/workflow.json +87 -0
- dmart/sample/spaces/management/schema.json +1 -0
- dmart/sample/spaces/management/users/.dm/alibaba/meta.user.json +23 -0
- dmart/sample/spaces/management/users/.dm/anonymous/meta.user.json +18 -0
- dmart/sample/spaces/management/users/.dm/dmart/meta.user.json +26 -0
- dmart/sample/spaces/management/users/.dm/meta.folder.json +14 -0
- dmart/sample/spaces/management/workflows/.dm/channel/meta.content.json +1 -0
- dmart/sample/spaces/management/workflows/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/workflows/channel.json +148 -0
- dmart/sample/spaces/management/workflows.json +1 -0
- dmart/sample/spaces/maqola/.dm/meta.space.json +33 -0
- dmart/sample/spaces/personal/.dm/meta.space.json +24 -0
- dmart/sample/spaces/personal/people/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/personal/people/dmart/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/personal/people/dmart/messages/.dm/0b5f7e7f/meta.content.json +1 -0
- dmart/sample/spaces/personal/people/dmart/messages/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/personal/people/dmart/messages/.dm/mytest/meta.content.json +1 -0
- dmart/sample/spaces/personal/people/dmart/messages/0b5f7e7f.json +1 -0
- dmart/sample/spaces/personal/people/dmart/messages/mytest.json +1 -0
- dmart/sample/spaces/personal/people/dmart/notifications/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/personal/people/dmart/private/.dm/inner/meta.content.json +1 -0
- dmart/sample/spaces/personal/people/dmart/private/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/personal/people/dmart/private/inner.json +1 -0
- dmart/sample/spaces/personal/people/dmart/protected/.dm/avatar/meta.content.json +1 -0
- dmart/sample/spaces/personal/people/dmart/protected/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/personal/people/dmart/protected/avatar.png +0 -0
- dmart/sample/spaces/personal/people/dmart/public/.dm/meta.folder.json +1 -0
- dmart/sample/test/.gitignore +2 -0
- dmart/sample/test/createcontent.json +9 -0
- dmart/sample/test/createmedia.json +9 -0
- dmart/sample/test/createmedia_entry.json +6 -0
- dmart/sample/test/createschema.json +8 -0
- dmart/sample/test/createschemawork.json +11 -0
- dmart/sample/test/createticket.json +13 -0
- dmart/sample/test/data.json +4 -0
- dmart/sample/test/deletecontent.json +12 -0
- dmart/sample/test/logo.jpeg +0 -0
- dmart/sample/test/my.jpg +0 -0
- dmart/sample/test/myticket.json +23 -0
- dmart/sample/test/resources.csv +12 -0
- dmart/sample/test/schema.json +16 -0
- dmart/sample/test/temp.json +1 -0
- dmart/sample/test/test.dmart +45 -0
- dmart/sample/test/ticket_schema.json +23 -0
- dmart/sample/test/ticket_workflow.json +85 -0
- dmart/sample/test/ticketbody.json +4 -0
- dmart/sample/test/ticketcontent.json +14 -0
- dmart/sample/test/updatecontent.json +20 -0
- dmart/sample/test/workflow_schema.json +68 -0
- dmart/scheduled_notification_handler.py +121 -0
- dmart/schema_migration.py +208 -0
- dmart/schema_modulate.py +192 -0
- dmart/set_admin_passwd.py +75 -0
- dmart/sync.py +202 -0
- dmart/test_utils.py +34 -0
- dmart/utils/__init__.py +0 -0
- dmart/utils/access_control.py +306 -0
- dmart/utils/async_request.py +8 -0
- dmart/utils/exporter.py +309 -0
- dmart/utils/firebase_notifier.py +57 -0
- dmart/utils/generate_email.py +37 -0
- dmart/utils/helpers.py +352 -0
- dmart/utils/hypercorn_config.py +12 -0
- dmart/utils/internal_error_code.py +60 -0
- dmart/utils/jwt.py +124 -0
- dmart/utils/logger.py +167 -0
- dmart/utils/middleware.py +99 -0
- dmart/utils/notification.py +75 -0
- dmart/utils/password_hashing.py +16 -0
- dmart/utils/plugin_manager.py +202 -0
- dmart/utils/query_policies_helper.py +128 -0
- dmart/utils/regex.py +44 -0
- dmart/utils/repository.py +529 -0
- dmart/utils/router_helper.py +19 -0
- dmart/utils/settings.py +212 -0
- dmart/utils/sms_notifier.py +21 -0
- dmart/utils/social_sso.py +67 -0
- dmart/utils/templates/activation.html.j2 +26 -0
- dmart/utils/templates/reminder.html.j2 +17 -0
- dmart/utils/ticket_sys_utils.py +203 -0
- dmart/utils/web_notifier.py +29 -0
- dmart/websocket.py +231 -0
- dmart-1.4.40.post8.dist-info/METADATA +75 -0
- dmart-1.4.40.post8.dist-info/RECORD +489 -0
- dmart-1.4.40.post8.dist-info/WHEEL +5 -0
- dmart-1.4.40.post8.dist-info/entry_points.txt +2 -0
- dmart-1.4.40.post8.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,3218 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import json
|
|
3
|
+
import os
|
|
4
|
+
import sys
|
|
5
|
+
import time
|
|
6
|
+
import hashlib
|
|
7
|
+
from contextlib import asynccontextmanager
|
|
8
|
+
from copy import copy
|
|
9
|
+
from datetime import datetime
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Any, Type, Tuple
|
|
12
|
+
from uuid import uuid4
|
|
13
|
+
import ast
|
|
14
|
+
from fastapi import status
|
|
15
|
+
from fastapi.logger import logger
|
|
16
|
+
from sqlalchemy import literal_column, or_
|
|
17
|
+
from sqlalchemy.orm import sessionmaker, defer
|
|
18
|
+
from sqlmodel import Session, select, col, delete, update, Integer, Float, Boolean, func, text
|
|
19
|
+
from sqlalchemy import String, cast, bindparam
|
|
20
|
+
import io
|
|
21
|
+
from sys import modules as sys_modules
|
|
22
|
+
import models.api as api
|
|
23
|
+
from models.api import Exception as API_Exception, Error as API_Error
|
|
24
|
+
import models.core as core
|
|
25
|
+
from models.enums import QueryType, LockAction, ResourceType, SortType
|
|
26
|
+
from data_adapters.sql.create_tables import (
|
|
27
|
+
Entries,
|
|
28
|
+
Histories,
|
|
29
|
+
Permissions,
|
|
30
|
+
Roles,
|
|
31
|
+
Users,
|
|
32
|
+
Spaces,
|
|
33
|
+
Attachments,
|
|
34
|
+
Locks,
|
|
35
|
+
Sessions,
|
|
36
|
+
Invitations,
|
|
37
|
+
URLShorts,
|
|
38
|
+
OTP,
|
|
39
|
+
)
|
|
40
|
+
from utils.helpers import (
|
|
41
|
+
arr_remove_common,
|
|
42
|
+
get_removed_items,
|
|
43
|
+
camel_case, resolve_schema_references,
|
|
44
|
+
)
|
|
45
|
+
from utils.internal_error_code import InternalErrorCode
|
|
46
|
+
from utils.middleware import get_request_data
|
|
47
|
+
from utils.password_hashing import hash_password, verify_password
|
|
48
|
+
from utils.query_policies_helper import get_user_query_policies, generate_query_policies
|
|
49
|
+
from utils.settings import settings
|
|
50
|
+
from data_adapters.base_data_adapter import BaseDataAdapter, MetaChild
|
|
51
|
+
from data_adapters.sql.adapter_helpers import (
|
|
52
|
+
set_results_from_aggregation, set_table_for_query, events_query,
|
|
53
|
+
subpath_checker, parse_search_string,
|
|
54
|
+
sqlite_aggregate_functions, mysql_aggregate_functions,
|
|
55
|
+
postgres_aggregate_functions, transform_keys_to_sql,
|
|
56
|
+
get_next_date_value, is_date_time_value,
|
|
57
|
+
# build_query_filter_for_allowed_field_values
|
|
58
|
+
)
|
|
59
|
+
from data_adapters.helpers import get_nested_value, trans_magic_words
|
|
60
|
+
from jsonschema import Draft7Validator
|
|
61
|
+
from starlette.datastructures import UploadFile
|
|
62
|
+
from sqlalchemy import URL
|
|
63
|
+
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def query_attachment_aggregation(subpath):
    """Build a statement counting attachments per resource type under *subpath*.

    Returns a SQLModel ``select`` that yields one ``(resource_type, count)``
    row for each distinct ``resource_type`` among attachments whose
    ``subpath`` column equals the given value. The statement is returned
    unexecuted; the caller is responsible for running it against a session.
    """
    # Project the raw resource_type column plus a COUNT(*) aggregate.
    statement = select(
        literal_column("resource_type").label("resource_type"),
        func.count(text("*")).label("count"),
    )
    # Restrict to the requested subpath; the column reference also supplies
    # the FROM clause (the Attachments table).
    statement = statement.where(col(Attachments.subpath) == subpath)
    # Generative methods may be chained in any order — grouping last renders
    # the same SQL as the original where-after-group_by chaining.
    return statement.group_by(text("resource_type"))
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def query_aggregation(table, query):
    """Build an aggregation SELECT over *table* from ``query.aggregation_data``.

    Columns come from ``aggregation_data.load`` (either real table columns,
    ``@``-prefixed column names, or JSON paths into the ``payload`` column),
    optionally grouped by ``aggregation_data.group_by`` and extended with
    reducer expressions (count/sum/avg/...) from ``aggregation_data.reducers``.
    """
    aggregate_functions: list = []

    # Each backend allows a different set of aggregate function names;
    # reducers outside this whitelist are silently skipped below.
    if "sqlite" in settings.database_driver:
        aggregate_functions = sqlite_aggregate_functions
    elif "mysql" in settings.database_driver:
        aggregate_functions = mysql_aggregate_functions
    elif "postgresql" in settings.database_driver:
        aggregate_functions = postgres_aggregate_functions

    def _normalize_json_path(path: str) -> str:
        # Strip a leading '@' marker and map user-facing "body.*" paths onto
        # the stored "payload.*" JSON structure.
        if path.startswith("@"):
            path = path[1:]
        if path.startswith("body."):
            return f"payload.{path}"
        return path

    def _selectable_for_load(item: str):
        # '@name' explicitly requests a real table column.
        if item.startswith("@"):
            col_name = item.replace("@", "")
            return getattr(table, col_name)

        # Bare names that match a column are used directly.
        if hasattr(table, item):
            return getattr(table, item)

        # Anything else is treated as a JSON path rendered to raw SQL;
        # the alias replaces dots so the label is a valid identifier.
        json_path = _normalize_json_path(item)
        expr = transform_keys_to_sql(json_path)
        alias = item.replace(".", "_")
        return text(expr).label(alias)

    statement = select(*[_selectable_for_load(ll) for ll in query.aggregation_data.load])

    if bool(query.aggregation_data.group_by):
        group_by_exprs = []
        for gb in query.aggregation_data.group_by:
            # NOTE(review): '@' entries use table.__dict__ here while
            # _selectable_for_load uses getattr — presumably equivalent for
            # mapped columns; confirm before unifying.
            if gb.startswith("@"):
                group_by_exprs.append(table.__dict__[gb.replace("@", "")])
            elif hasattr(table, gb):
                group_by_exprs.append(getattr(table, gb))
            else:
                json_path = _normalize_json_path(gb)
                expr = transform_keys_to_sql(json_path)
                group_by_exprs.append(text(expr))
        if group_by_exprs:
            statement = statement.group_by(*group_by_exprs)

    if bool(query.aggregation_data.reducers):
        agg_selects = []
        for reducer in query.aggregation_data.reducers:
            # Only whitelisted aggregate names for the active backend are applied.
            if reducer.reducer_name in aggregate_functions:
                field_expr_str: str
                if len(reducer.args) == 0:
                    # No argument: aggregate over all rows (e.g. count(*)).
                    field_expr_str = "*"
                else:
                    arg0 = reducer.args[0]
                    arg0 = _normalize_json_path(arg0)
                    base_arg = arg0
                    if hasattr(table, base_arg):
                        field = getattr(table, base_arg)
                        if field is None:
                            continue
                        # Cast the column so the aggregate receives a sane type.
                        if isinstance(field.type, Integer) or isinstance(field.type, Boolean):
                            field_expr_str = f"{field}::int"
                        elif isinstance(field.type, Float):
                            field_expr_str = f"{field}::float"
                        else:
                            field_expr_str = f"{field}::text"
                    else:
                        # JSON-path argument: cast based on what the reducer needs.
                        jp = transform_keys_to_sql(arg0)
                        if reducer.reducer_name in ("sum", "avg", "total"):
                            field_expr_str = f"({jp})::float"
                        elif reducer.reducer_name in ("count", "r_count"):
                            field_expr_str = "*"
                        elif reducer.reducer_name in ("min", "max", "group_concat"):
                            field_expr_str = f"({jp})::text"
                        else:
                            field_expr_str = f"({jp})"
                agg_selects.append(
                    getattr(func, reducer.reducer_name)(text(field_expr_str)).label(reducer.alias)
                )
        if agg_selects:
            # Append the aggregates to the already-selected load columns.
            cols = list(statement.selected_columns) + agg_selects
            statement = statement.with_only_columns(*cols)
    return statement
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def string_to_list(input_str):
    """Coerce *input_str* into a list.

    Lists are returned unchanged. Strings that parse (via
    ``ast.literal_eval``) to a list are returned as that list. Anything
    else — unparseable strings or strings that parse to a non-list
    literal — is wrapped in a single-element list.
    """
    if isinstance(input_str, list):
        return input_str
    try:
        result = ast.literal_eval(input_str)
        if isinstance(result, list):
            return result
    except (ValueError, SyntaxError):
        return [input_str]
    # Bug fix: previously this path fell through and implicitly returned
    # None when the string parsed to a non-list literal (e.g. "42").
    return [input_str]
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
def apply_acl_and_query_policies(statement, table, user_shortname, user_query_policies):
    """Restrict *statement* to rows *user_shortname* is allowed to query.

    A row is visible when the user owns it, when an ACL entry grants the
    'query' action, or when one of the user's query-policy patterns matches
    an entry of the row's ``query_policies`` array. Attachments and
    Histories carry no ACL/query-policy columns and pass through unchanged.

    Fix: the clause-building code (clause_str / access_filter /
    ``.where().params()``) was duplicated across three branches; it is now
    built once, with ``like_params`` defaulting to empty.
    """
    # Tables without access metadata are returned unfiltered.
    if table in [Attachments, Histories] or not hasattr(table, 'query_policies'):
        return statement

    access_conditions = [
        "owner_shortname = :user_shortname",
        "EXISTS (SELECT 1 FROM jsonb_array_elements(CASE WHEN jsonb_typeof(acl::jsonb) = 'array' THEN acl::jsonb ELSE '[]'::jsonb END) AS elem WHERE elem->>'user_shortname' = :user_shortname AND (elem->'allowed_actions') ? 'query')"
    ]
    like_params: dict = {}

    if user_query_policies:
        # Expand '|'-separated policy entries and translate '*' globs to SQL
        # LIKE wildcards.
        # NOTE(review): literal '%'/'_' characters in a policy are not
        # escaped and will act as LIKE wildcards — confirm this is intended.
        patterns = []
        for item in [str(p) for p in user_query_policies]:
            for part in str(item).split('|'):
                part = part.strip()
                if part:
                    patterns.append(part.replace('*', '%'))

        # De-duplicate while preserving first-seen order.
        dedup_patterns = list(dict.fromkeys(patterns))

        if dedup_patterns:
            like_clauses = []
            for idx, pat in enumerate(dedup_patterns):
                param_name = f"qp_like_{idx}"
                like_clauses.append(f"qp LIKE :{param_name}")
                like_params[param_name] = pat

            qp_exists = "EXISTS (SELECT 1 FROM unnest(query_policies) AS qp WHERE " + " OR ".join(like_clauses) + ")"
            access_conditions.insert(1, qp_exists)

    # Single clause build for all paths; every value travels as a bound
    # parameter, never interpolated into the SQL text.
    clause_str = "(" + " OR ".join(access_conditions) + ")"
    access_filter = text(clause_str)
    statement = statement.where(access_filter).params(
        user_shortname=user_shortname,
        **like_params
    )
    return statement
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
async def set_sql_statement_from_query(table, statement, query, is_for_count):
|
|
223
|
+
try:
|
|
224
|
+
if query.type == QueryType.attachments_aggregation and not is_for_count:
|
|
225
|
+
return query_attachment_aggregation(query.subpath)
|
|
226
|
+
|
|
227
|
+
if query.type == QueryType.aggregation and not is_for_count:
|
|
228
|
+
statement = query_aggregation(table, query)
|
|
229
|
+
|
|
230
|
+
if query.type == QueryType.tags and not is_for_count:
|
|
231
|
+
if query.retrieve_json_payload:
|
|
232
|
+
statement = select(
|
|
233
|
+
func.jsonb_array_elements_text(table.tags).label('tag'),
|
|
234
|
+
func.count('*').label('count')
|
|
235
|
+
).group_by('tag')
|
|
236
|
+
else:
|
|
237
|
+
statement = select(func.jsonb_array_elements_text(table.tags).label('tag')).distinct()
|
|
238
|
+
|
|
239
|
+
except Exception as e:
|
|
240
|
+
print("[!query]", e)
|
|
241
|
+
raise api.Exception(
|
|
242
|
+
status_code=status.HTTP_400_BAD_REQUEST,
|
|
243
|
+
error=api.Error(
|
|
244
|
+
type="query",
|
|
245
|
+
code=InternalErrorCode.SOMETHING_WRONG,
|
|
246
|
+
message=str(e),
|
|
247
|
+
),
|
|
248
|
+
)
|
|
249
|
+
|
|
250
|
+
if query.space_name:
|
|
251
|
+
statement = statement.where(table.space_name == query.space_name)
|
|
252
|
+
if query.subpath and table in [Entries, Attachments]:
|
|
253
|
+
if query.exact_subpath:
|
|
254
|
+
statement = statement.where(table.subpath == query.subpath)
|
|
255
|
+
else:
|
|
256
|
+
# Use bind parameter for the ILIKE pattern to avoid string interpolation
|
|
257
|
+
subpath_like = (f"{query.subpath}/%".replace('//', '/'))
|
|
258
|
+
statement = statement.where(
|
|
259
|
+
or_(
|
|
260
|
+
table.subpath == query.subpath,
|
|
261
|
+
text("subpath ILIKE :subpath_like").bindparams(bindparam("subpath_like"))
|
|
262
|
+
)
|
|
263
|
+
).params(subpath_like=subpath_like)
|
|
264
|
+
if query.search:
|
|
265
|
+
if not query.search.startswith("@") and not query.search.startswith("-"):
|
|
266
|
+
p = "shortname || ' ' || tags || ' ' || displayname || ' ' || description || ' ' || payload"
|
|
267
|
+
if table is Users:
|
|
268
|
+
p += " || ' ' || COALESCE(email, '') || ' ' || COALESCE(msisdn, '') || ' ' || roles"
|
|
269
|
+
if table is Roles:
|
|
270
|
+
p += " || ' ' || permissions"
|
|
271
|
+
# Parameterize search string
|
|
272
|
+
statement = statement.where(
|
|
273
|
+
text("(" + p + ") ILIKE :search")
|
|
274
|
+
).params(search=f"%{query.search}%")
|
|
275
|
+
else:
|
|
276
|
+
search_tokens = parse_search_string(query.search)
|
|
277
|
+
|
|
278
|
+
try:
|
|
279
|
+
table_columns = set(c.name for c in table.__table__.columns) # type: ignore[attr-defined]
|
|
280
|
+
except Exception:
|
|
281
|
+
table_columns = set()
|
|
282
|
+
|
|
283
|
+
def _field_exists_in_table(_field: str) -> bool:
|
|
284
|
+
if _field in table_columns:
|
|
285
|
+
return True
|
|
286
|
+
if _field.startswith('payload.') and 'payload' in table_columns:
|
|
287
|
+
return True
|
|
288
|
+
if _field.startswith('payload.body.') and 'payload' in table_columns:
|
|
289
|
+
return True
|
|
290
|
+
return False
|
|
291
|
+
|
|
292
|
+
for field, field_data in search_tokens.items():
|
|
293
|
+
if not _field_exists_in_table(field):
|
|
294
|
+
continue
|
|
295
|
+
values = field_data['values']
|
|
296
|
+
operation = field_data['operation']
|
|
297
|
+
negative = field_data.get('negative', False)
|
|
298
|
+
value_type = field_data.get('value_type', 'string')
|
|
299
|
+
format_strings = field_data.get('format_strings', {})
|
|
300
|
+
|
|
301
|
+
if not values:
|
|
302
|
+
continue
|
|
303
|
+
|
|
304
|
+
if field.startswith('payload.body.'):
|
|
305
|
+
payload_field = field.replace('payload.body.', '')
|
|
306
|
+
payload_path = '->'.join([f"'{part}'" for part in payload_field.split('.')])
|
|
307
|
+
|
|
308
|
+
payload_path_splited = payload_path.split('->')
|
|
309
|
+
if len(payload_path_splited) > 1:
|
|
310
|
+
_nested_no_last = '->'.join(payload_path_splited[:-1])
|
|
311
|
+
_last = payload_path_splited[-1]
|
|
312
|
+
_payload_text_extract = f"payload::jsonb->'body'->{_nested_no_last}->>{_last}"
|
|
313
|
+
else:
|
|
314
|
+
_payload_text_extract = f"payload::jsonb->'body'->>{payload_path}"
|
|
315
|
+
conditions = []
|
|
316
|
+
|
|
317
|
+
if value_type == 'numeric' and field_data.get('is_range', False) and len(
|
|
318
|
+
field_data.get('range_values', [])) == 2:
|
|
319
|
+
val1, val2 = field_data['range_values']
|
|
320
|
+
try:
|
|
321
|
+
num1 = float(val1)
|
|
322
|
+
num2 = float(val2)
|
|
323
|
+
if num1 > num2:
|
|
324
|
+
val1, val2 = val2, val1
|
|
325
|
+
except ValueError:
|
|
326
|
+
pass
|
|
327
|
+
if negative:
|
|
328
|
+
conditions.append(
|
|
329
|
+
f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'number' AND (payload::jsonb->'body'->>{payload_path})::float NOT BETWEEN {val1} AND {val2})")
|
|
330
|
+
else:
|
|
331
|
+
conditions.append(
|
|
332
|
+
f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'number' AND (payload::jsonb->'body'->>{payload_path})::float BETWEEN {val1} AND {val2})")
|
|
333
|
+
|
|
334
|
+
for value in values:
|
|
335
|
+
if value_type == 'datetime':
|
|
336
|
+
if field_data.get('is_range', False) and len(field_data.get('range_values', [])) == 2:
|
|
337
|
+
range_values = field_data['range_values']
|
|
338
|
+
val1, val2 = range_values
|
|
339
|
+
if is_date_time_value(val1)[0] and is_date_time_value(val2)[0]:
|
|
340
|
+
fmt1 = format_strings.get(val1)
|
|
341
|
+
fmt2 = format_strings.get(val2)
|
|
342
|
+
if fmt1 and fmt2:
|
|
343
|
+
if fmt1 == fmt2:
|
|
344
|
+
if val1 > val2:
|
|
345
|
+
val1, val2 = val2, val1
|
|
346
|
+
else:
|
|
347
|
+
try:
|
|
348
|
+
from datetime import datetime
|
|
349
|
+
dt1 = datetime.strptime(val1, fmt1.replace('YYYY', '%Y').replace('MM',
|
|
350
|
+
'%m').replace(
|
|
351
|
+
'DD', '%d').replace('"T"HH24', 'T%H').replace('MI', '%M').replace(
|
|
352
|
+
'SS', '%S').replace('US', '%f'))
|
|
353
|
+
dt2 = datetime.strptime(val2, fmt2.replace('YYYY', '%Y').replace('MM',
|
|
354
|
+
'%m').replace(
|
|
355
|
+
'DD', '%d').replace('"T"HH24', 'T%H').replace('MI', '%M').replace(
|
|
356
|
+
'SS', '%S').replace('US', '%f'))
|
|
357
|
+
if dt1 > dt2:
|
|
358
|
+
val1, val2 = val2, val1
|
|
359
|
+
except Exception:
|
|
360
|
+
if val1 > val2:
|
|
361
|
+
val1, val2 = val2, val1
|
|
362
|
+
else:
|
|
363
|
+
if val1 > val2:
|
|
364
|
+
val1, val2 = val2, val1
|
|
365
|
+
|
|
366
|
+
start_value, end_value = val1, val2
|
|
367
|
+
start_format = format_strings.get(start_value)
|
|
368
|
+
end_format = format_strings.get(end_value)
|
|
369
|
+
|
|
370
|
+
if start_format and end_format:
|
|
371
|
+
if negative:
|
|
372
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND TO_TIMESTAMP({_payload_text_extract}, '{start_format}') NOT BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}') AND TO_TIMESTAMP('{end_value}', '{end_format}'))"
|
|
373
|
+
fallback_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND ({_payload_text_extract})::text NOT BETWEEN '{start_value}' AND '{end_value}')"
|
|
374
|
+
conditions.append(f"({string_condition} OR {fallback_condition})")
|
|
375
|
+
else:
|
|
376
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND TO_TIMESTAMP({_payload_text_extract}, '{start_format}') BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}') AND TO_TIMESTAMP('{end_value}', '{end_format}'))"
|
|
377
|
+
fallback_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND ({_payload_text_extract})::text BETWEEN '{start_value}' AND '{end_value}')"
|
|
378
|
+
conditions.append(f"({string_condition} OR {fallback_condition})")
|
|
379
|
+
else:
|
|
380
|
+
format_string = format_strings.get(value)
|
|
381
|
+
if format_string:
|
|
382
|
+
next_value = get_next_date_value(value, format_string)
|
|
383
|
+
|
|
384
|
+
if negative:
|
|
385
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND (TO_TIMESTAMP({_payload_text_extract}, '{format_string}') < TO_TIMESTAMP('{value}', '{format_string}') OR TO_TIMESTAMP({_payload_text_extract}, '{format_string}') >= TO_TIMESTAMP('{next_value}', '{format_string}')))"
|
|
386
|
+
fallback_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND (({_payload_text_extract})::text < '{value}' OR ({_payload_text_extract})::text >= '{next_value}'))"
|
|
387
|
+
conditions.append(f"({string_condition} OR {fallback_condition})")
|
|
388
|
+
else:
|
|
389
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND TO_TIMESTAMP({_payload_text_extract}, '{format_string}') >= TO_TIMESTAMP('{value}', '{format_string}') AND TO_TIMESTAMP({_payload_text_extract}, '{format_string}') < TO_TIMESTAMP('{next_value}', '{format_string}'))"
|
|
390
|
+
fallback_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND ({_payload_text_extract})::text >= '{value}' AND ({_payload_text_extract})::text < '{next_value}')"
|
|
391
|
+
conditions.append(f"({string_condition} OR {fallback_condition})")
|
|
392
|
+
elif value_type == 'boolean':
|
|
393
|
+
for value in values:
|
|
394
|
+
bool_value = value.lower()
|
|
395
|
+
if negative:
|
|
396
|
+
bool_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'boolean' AND ({_payload_text_extract})::boolean != {bool_value})"
|
|
397
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND ({_payload_text_extract})::boolean != {bool_value})"
|
|
398
|
+
conditions.append(f"({bool_condition} OR {string_condition})")
|
|
399
|
+
else:
|
|
400
|
+
bool_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'boolean' AND ({_payload_text_extract})::boolean = {bool_value})"
|
|
401
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND ({_payload_text_extract})::boolean = {bool_value})"
|
|
402
|
+
conditions.append(f"({bool_condition} OR {string_condition})")
|
|
403
|
+
else:
|
|
404
|
+
is_numeric = False
|
|
405
|
+
if value.isnumeric():
|
|
406
|
+
is_numeric = True
|
|
407
|
+
|
|
408
|
+
if negative:
|
|
409
|
+
array_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'array' AND NOT (payload::jsonb->'body'->{payload_path} @> '[\"{value}\"]'::jsonb))"
|
|
410
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND {_payload_text_extract} != '{value}')"
|
|
411
|
+
|
|
412
|
+
if is_numeric:
|
|
413
|
+
number_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'number' AND ({_payload_text_extract})::float != {value})"
|
|
414
|
+
conditions.append(
|
|
415
|
+
f"({array_condition} OR {string_condition} OR {number_condition})")
|
|
416
|
+
else:
|
|
417
|
+
conditions.append(f"({array_condition} OR {string_condition})")
|
|
418
|
+
else:
|
|
419
|
+
array_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'array' AND payload::jsonb->'body'->{payload_path} @> '[\"{value}\"]'::jsonb)"
|
|
420
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'string' AND {_payload_text_extract} = '{value}')"
|
|
421
|
+
direct_condition = f"(payload::jsonb->'body'->{payload_path} = '\"{value}\"'::jsonb)"
|
|
422
|
+
|
|
423
|
+
if is_numeric:
|
|
424
|
+
number_condition = f"(jsonb_typeof(payload::jsonb->'body'->{payload_path}) = 'number' AND ({_payload_text_extract})::float = {value})"
|
|
425
|
+
conditions.append(
|
|
426
|
+
f"({array_condition} OR {string_condition} OR {direct_condition} OR {number_condition})")
|
|
427
|
+
else:
|
|
428
|
+
conditions.append(
|
|
429
|
+
f"({array_condition} OR {string_condition} OR {direct_condition})")
|
|
430
|
+
|
|
431
|
+
if conditions:
|
|
432
|
+
if negative:
|
|
433
|
+
join_operator = " OR " if operation == 'AND' else " AND "
|
|
434
|
+
else:
|
|
435
|
+
join_operator = " AND " if operation == 'AND' else " OR "
|
|
436
|
+
statement = statement.where(text("(" + join_operator.join(conditions) + ")"))
|
|
437
|
+
elif field.startswith('payload.'):
|
|
438
|
+
payload_field = field.replace('payload.', '')
|
|
439
|
+
payload_path = '->'.join([f"'{part}'" for part in payload_field.split('.')])
|
|
440
|
+
|
|
441
|
+
payload_path_splited = payload_path.split('->')
|
|
442
|
+
if len(payload_path_splited) > 1:
|
|
443
|
+
_nested_no_last = '->'.join(payload_path_splited[:-1])
|
|
444
|
+
_last = payload_path_splited[-1]
|
|
445
|
+
_payload_text_extract = f"payload::jsonb->{_nested_no_last}->>{_last}"
|
|
446
|
+
else:
|
|
447
|
+
_payload_text_extract = f"payload::jsonb->>{payload_path}"
|
|
448
|
+
|
|
449
|
+
conditions = []
|
|
450
|
+
|
|
451
|
+
if value_type == 'numeric' and field_data.get('is_range', False) and len(
|
|
452
|
+
field_data.get('range_values', [])) == 2:
|
|
453
|
+
val1, val2 = field_data['range_values']
|
|
454
|
+
try:
|
|
455
|
+
num1 = float(val1)
|
|
456
|
+
num2 = float(val2)
|
|
457
|
+
if num1 > num2:
|
|
458
|
+
val1, val2 = val2, val1
|
|
459
|
+
except ValueError:
|
|
460
|
+
pass
|
|
461
|
+
if negative:
|
|
462
|
+
conditions.append(
|
|
463
|
+
f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'number' AND ({_payload_text_extract})::float NOT BETWEEN {val1} AND {val2})")
|
|
464
|
+
else:
|
|
465
|
+
conditions.append(
|
|
466
|
+
f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'number' AND ({_payload_text_extract})::float BETWEEN {val1} AND {val2})")
|
|
467
|
+
|
|
468
|
+
for value in values:
|
|
469
|
+
if value_type == 'datetime':
|
|
470
|
+
if field_data.get('is_range', False) and len(field_data.get('range_values', [])) == 2:
|
|
471
|
+
range_values = field_data['range_values']
|
|
472
|
+
val1, val2 = range_values
|
|
473
|
+
if is_date_time_value(val1)[0] and is_date_time_value(val2)[0]:
|
|
474
|
+
fmt1 = format_strings.get(val1)
|
|
475
|
+
fmt2 = format_strings.get(val2)
|
|
476
|
+
if fmt1 and fmt2:
|
|
477
|
+
if fmt1 == fmt2:
|
|
478
|
+
if val1 > val2:
|
|
479
|
+
val1, val2 = val2, val1
|
|
480
|
+
else:
|
|
481
|
+
try:
|
|
482
|
+
from datetime import datetime
|
|
483
|
+
dt1 = datetime.strptime(val1, fmt1.replace('YYYY', '%Y').replace('MM',
|
|
484
|
+
'%m').replace(
|
|
485
|
+
'DD', '%d').replace('"T"HH24', 'T%H').replace('MI', '%M').replace(
|
|
486
|
+
'SS', '%S').replace('US', '%f'))
|
|
487
|
+
dt2 = datetime.strptime(val2, fmt2.replace('YYYY', '%Y').replace('MM',
|
|
488
|
+
'%m').replace(
|
|
489
|
+
'DD', '%d').replace('"T"HH24', 'T%H').replace('MI', '%M').replace(
|
|
490
|
+
'SS', '%S').replace('US', '%f'))
|
|
491
|
+
if dt1 > dt2:
|
|
492
|
+
val1, val2 = val2, val1
|
|
493
|
+
except Exception:
|
|
494
|
+
if val1 > val2:
|
|
495
|
+
val1, val2 = val2, val1
|
|
496
|
+
else:
|
|
497
|
+
if val1 > val2:
|
|
498
|
+
val1, val2 = val2, val1
|
|
499
|
+
|
|
500
|
+
start_value, end_value = val1, val2
|
|
501
|
+
start_format = format_strings.get(start_value)
|
|
502
|
+
end_format = format_strings.get(end_value)
|
|
503
|
+
|
|
504
|
+
if start_format and end_format:
|
|
505
|
+
if negative:
|
|
506
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND TO_TIMESTAMP({_payload_text_extract}, '{start_format}') NOT BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}') AND TO_TIMESTAMP('{end_value}', '{end_format}'))"
|
|
507
|
+
fallback_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND ({_payload_text_extract})::text NOT BETWEEN '{start_value}' AND '{end_value}')"
|
|
508
|
+
conditions.append(f"({string_condition} OR {fallback_condition})")
|
|
509
|
+
else:
|
|
510
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND TO_TIMESTAMP({_payload_text_extract}, '{start_format}') BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}') AND TO_TIMESTAMP('{end_value}', '{end_format}'))"
|
|
511
|
+
fallback_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND ({_payload_text_extract})::text BETWEEN '{start_value}' AND '{end_value}')"
|
|
512
|
+
conditions.append(f"({string_condition} OR {fallback_condition})")
|
|
513
|
+
else:
|
|
514
|
+
format_string = format_strings.get(value)
|
|
515
|
+
if format_string:
|
|
516
|
+
next_value = get_next_date_value(value, format_string)
|
|
517
|
+
|
|
518
|
+
if negative:
|
|
519
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND (TO_TIMESTAMP({_payload_text_extract}, '{format_string}') < TO_TIMESTAMP('{value}', '{format_string}') OR TO_TIMESTAMP({_payload_text_extract}, '{format_string}') >= TO_TIMESTAMP('{next_value}', '{format_string}')))"
|
|
520
|
+
fallback_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND (({_payload_text_extract})::text < '{value}' OR ({_payload_text_extract})::text >= '{next_value}'))"
|
|
521
|
+
conditions.append(f"({string_condition} OR {fallback_condition})")
|
|
522
|
+
else:
|
|
523
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND TO_TIMESTAMP({_payload_text_extract}, '{format_string}') >= TO_TIMESTAMP('{value}', '{format_string}') AND TO_TIMESTAMP({_payload_text_extract}, '{format_string}') < TO_TIMESTAMP('{next_value}', '{format_string}'))"
|
|
524
|
+
fallback_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND ({_payload_text_extract})::text >= '{value}' AND ({_payload_text_extract})::text < '{next_value}')"
|
|
525
|
+
conditions.append(f"({string_condition} OR {fallback_condition})")
|
|
526
|
+
else:
|
|
527
|
+
is_numeric = False
|
|
528
|
+
if value.isnumeric():
|
|
529
|
+
is_numeric = True
|
|
530
|
+
|
|
531
|
+
if negative:
|
|
532
|
+
array_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'array' AND NOT (payload::jsonb->{payload_path} @> '[\"{value}\"]'::jsonb))"
|
|
533
|
+
if '*' in value:
|
|
534
|
+
pattern = value.replace('*', '%')
|
|
535
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND {_payload_text_extract} NOT ILIKE '{pattern}')"
|
|
536
|
+
else:
|
|
537
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND {_payload_text_extract} != '{value}')"
|
|
538
|
+
|
|
539
|
+
if is_numeric:
|
|
540
|
+
number_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'number' AND ({_payload_text_extract})::float != {value})"
|
|
541
|
+
conditions.append(
|
|
542
|
+
f"({array_condition} OR {string_condition} OR {number_condition})")
|
|
543
|
+
else:
|
|
544
|
+
conditions.append(f"({array_condition} OR {string_condition})")
|
|
545
|
+
else:
|
|
546
|
+
array_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'array' AND payload::jsonb->{payload_path} @> '[\"{value}\"]'::jsonb)"
|
|
547
|
+
if '*' in value:
|
|
548
|
+
pattern = value.replace('*', '%')
|
|
549
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND {_payload_text_extract} ILIKE '{pattern}')"
|
|
550
|
+
else:
|
|
551
|
+
string_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'string' AND {_payload_text_extract} = '{value}')"
|
|
552
|
+
direct_condition = f"(payload::jsonb->{payload_path} = '\"{value}\"'::jsonb)"
|
|
553
|
+
|
|
554
|
+
if is_numeric:
|
|
555
|
+
number_condition = f"(jsonb_typeof(payload::jsonb->{payload_path}) = 'number' AND ({_payload_text_extract})::float = {value})"
|
|
556
|
+
conditions.append(
|
|
557
|
+
f"({array_condition} OR {string_condition} OR {direct_condition} OR {number_condition})")
|
|
558
|
+
else:
|
|
559
|
+
conditions.append(
|
|
560
|
+
f"({array_condition} OR {string_condition} OR {direct_condition})")
|
|
561
|
+
|
|
562
|
+
if conditions:
|
|
563
|
+
if negative:
|
|
564
|
+
join_operator = " OR " if operation == 'AND' else " AND "
|
|
565
|
+
else:
|
|
566
|
+
join_operator = " AND " if operation == 'AND' else " OR "
|
|
567
|
+
statement = statement.where(text("(" + join_operator.join(conditions) + ")"))
|
|
568
|
+
else:
|
|
569
|
+
try:
|
|
570
|
+
if hasattr(table, field):
|
|
571
|
+
field_obj = getattr(table, field)
|
|
572
|
+
if hasattr(field_obj, 'type') and str(field_obj.type).lower() == 'jsonb':
|
|
573
|
+
conditions = []
|
|
574
|
+
for value in values:
|
|
575
|
+
if negative:
|
|
576
|
+
array_condition = f"(jsonb_typeof({field}) = 'array' AND NOT ({field} @> '[\"{value}\"]'::jsonb))"
|
|
577
|
+
object_condition = f"(jsonb_typeof({field}) = 'object' AND NOT ({field}::text ILIKE '%{value}%'))"
|
|
578
|
+
conditions.append(f"({array_condition} OR {object_condition})")
|
|
579
|
+
else:
|
|
580
|
+
array_condition = f"(jsonb_typeof({field}) = 'array' AND {field} @> '[\"{value}\"]'::jsonb)"
|
|
581
|
+
object_condition = f"(jsonb_typeof({field}) = 'object' AND {field}::text ILIKE '%{value}%')"
|
|
582
|
+
conditions.append(f"({array_condition} OR {object_condition})")
|
|
583
|
+
|
|
584
|
+
if conditions:
|
|
585
|
+
if negative:
|
|
586
|
+
join_operator = " OR " if operation == 'AND' else " AND "
|
|
587
|
+
else:
|
|
588
|
+
join_operator = " AND " if operation == 'AND' else " OR "
|
|
589
|
+
statement = statement.where(text("(" + join_operator.join(conditions) + ")"))
|
|
590
|
+
elif value_type == 'datetime':
|
|
591
|
+
conditions = []
|
|
592
|
+
|
|
593
|
+
if field_data.get('is_range', False) and len(field_data.get('range_values', [])) == 2:
|
|
594
|
+
range_values = field_data['range_values']
|
|
595
|
+
val1, val2 = range_values
|
|
596
|
+
if is_date_time_value(val1)[0] and is_date_time_value(val2)[0]:
|
|
597
|
+
fmt1 = format_strings.get(val1)
|
|
598
|
+
fmt2 = format_strings.get(val2)
|
|
599
|
+
if fmt1 and fmt2:
|
|
600
|
+
if fmt1 == fmt2:
|
|
601
|
+
if val1 > val2:
|
|
602
|
+
val1, val2 = val2, val1
|
|
603
|
+
else:
|
|
604
|
+
try:
|
|
605
|
+
from datetime import datetime
|
|
606
|
+
dt1 = datetime.strptime(val1,
|
|
607
|
+
fmt1.replace('YYYY', '%Y').replace('MM',
|
|
608
|
+
'%m').replace(
|
|
609
|
+
'DD', '%d').replace('"T"HH24',
|
|
610
|
+
'T%H').replace('MI',
|
|
611
|
+
'%M').replace(
|
|
612
|
+
'SS', '%S').replace('US', '%f'))
|
|
613
|
+
dt2 = datetime.strptime(val2,
|
|
614
|
+
fmt2.replace('YYYY', '%Y').replace('MM',
|
|
615
|
+
'%m').replace(
|
|
616
|
+
'DD', '%d').replace('"T"HH24',
|
|
617
|
+
'T%H').replace('MI',
|
|
618
|
+
'%M').replace(
|
|
619
|
+
'SS', '%S').replace('US', '%f'))
|
|
620
|
+
if dt1 > dt2:
|
|
621
|
+
val1, val2 = val2, val1
|
|
622
|
+
except Exception:
|
|
623
|
+
if val1 > val2:
|
|
624
|
+
val1, val2 = val2, val1
|
|
625
|
+
else:
|
|
626
|
+
if val1 > val2:
|
|
627
|
+
val1, val2 = val2, val1
|
|
628
|
+
|
|
629
|
+
start_value, end_value = val1, val2
|
|
630
|
+
start_format = format_strings.get(start_value)
|
|
631
|
+
end_format = format_strings.get(end_value)
|
|
632
|
+
|
|
633
|
+
if start_format and end_format:
|
|
634
|
+
if negative:
|
|
635
|
+
conditions.append(
|
|
636
|
+
f"({field}::timestamp NOT BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}')::timestamp AND TO_TIMESTAMP('{end_value}', '{end_format}')::timestamp)")
|
|
637
|
+
else:
|
|
638
|
+
conditions.append(
|
|
639
|
+
f"({field}::timestamp BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}')::timestamp AND TO_TIMESTAMP('{end_value}', '{end_format}')::timestamp)")
|
|
640
|
+
else:
|
|
641
|
+
for value in values:
|
|
642
|
+
format_string = format_strings.get(value)
|
|
643
|
+
if format_string:
|
|
644
|
+
next_value = get_next_date_value(value, format_string)
|
|
645
|
+
|
|
646
|
+
if negative:
|
|
647
|
+
conditions.append(
|
|
648
|
+
f"({field}::timestamp < TO_TIMESTAMP('{value}', '{format_string}')::timestamp OR {field}::timestamp >= TO_TIMESTAMP('{next_value}', '{format_string}')::timestamp)")
|
|
649
|
+
else:
|
|
650
|
+
conditions.append(
|
|
651
|
+
f"({field}::timestamp >= TO_TIMESTAMP('{value}', '{format_string}')::timestamp AND {field}::timestamp < TO_TIMESTAMP('{next_value}', '{format_string}')::timestamp)")
|
|
652
|
+
|
|
653
|
+
if conditions:
|
|
654
|
+
if negative:
|
|
655
|
+
join_operator = " OR " if operation == 'AND' else " AND "
|
|
656
|
+
else:
|
|
657
|
+
join_operator = " AND " if operation == 'AND' else " OR "
|
|
658
|
+
statement = statement.where(text("(" + join_operator.join(conditions) + ")"))
|
|
659
|
+
elif value_type == 'numeric':
|
|
660
|
+
conditions = []
|
|
661
|
+
|
|
662
|
+
if field_data.get('is_range', False) and len(field_data.get('range_values', [])) == 2:
|
|
663
|
+
range_values = field_data['range_values']
|
|
664
|
+
val1, val2 = range_values
|
|
665
|
+
try:
|
|
666
|
+
num1 = float(val1)
|
|
667
|
+
num2 = float(val2)
|
|
668
|
+
if num1 > num2:
|
|
669
|
+
val1, val2 = val2, val1
|
|
670
|
+
except ValueError:
|
|
671
|
+
pass
|
|
672
|
+
|
|
673
|
+
if negative:
|
|
674
|
+
conditions.append(f"(CAST({field} AS FLOAT) NOT BETWEEN {val1} AND {val2})")
|
|
675
|
+
else:
|
|
676
|
+
conditions.append(f"(CAST({field} AS FLOAT) BETWEEN {val1} AND {val2})")
|
|
677
|
+
else:
|
|
678
|
+
for value in values:
|
|
679
|
+
if negative:
|
|
680
|
+
conditions.append(f"(CAST({field} AS FLOAT) != {value})")
|
|
681
|
+
else:
|
|
682
|
+
conditions.append(f"(CAST({field} AS FLOAT) = {value})")
|
|
683
|
+
|
|
684
|
+
if conditions:
|
|
685
|
+
if negative:
|
|
686
|
+
join_operator = " OR " if operation == 'AND' else " AND "
|
|
687
|
+
else:
|
|
688
|
+
join_operator = " AND " if operation == 'AND' else " OR "
|
|
689
|
+
|
|
690
|
+
statement = statement.where(text("(" + join_operator.join(conditions) + ")"))
|
|
691
|
+
elif value_type == 'boolean':
|
|
692
|
+
conditions = []
|
|
693
|
+
for value in values:
|
|
694
|
+
bool_value = value.lower()
|
|
695
|
+
if negative:
|
|
696
|
+
conditions.append(f"(CAST({field} AS BOOLEAN) != {bool_value})")
|
|
697
|
+
else:
|
|
698
|
+
conditions.append(f"(CAST({field} AS BOOLEAN) = {bool_value})")
|
|
699
|
+
|
|
700
|
+
if conditions:
|
|
701
|
+
if negative:
|
|
702
|
+
join_operator = " OR " if operation == 'AND' else " AND "
|
|
703
|
+
else:
|
|
704
|
+
join_operator = " AND " if operation == 'AND' else " OR "
|
|
705
|
+
statement = statement.where(text(join_operator.join(conditions)))
|
|
706
|
+
else:
|
|
707
|
+
field_obj = getattr(table, field)
|
|
708
|
+
is_timestamp = hasattr(field_obj, 'type') and str(field_obj.type).lower().startswith(
|
|
709
|
+
'timestamp')
|
|
710
|
+
|
|
711
|
+
if is_timestamp:
|
|
712
|
+
conditions = []
|
|
713
|
+
for value in values:
|
|
714
|
+
if negative:
|
|
715
|
+
conditions.append(f"{field}::text != '{value}'")
|
|
716
|
+
else:
|
|
717
|
+
conditions.append(f"{field}::text = '{value}'")
|
|
718
|
+
|
|
719
|
+
join_operator = " AND " if operation == 'AND' else " OR "
|
|
720
|
+
statement = statement.where(text("(" + join_operator.join(conditions) + ")"))
|
|
721
|
+
else:
|
|
722
|
+
conditions = []
|
|
723
|
+
for value in values:
|
|
724
|
+
if '*' in value:
|
|
725
|
+
pattern = value.replace('*', '%')
|
|
726
|
+
if negative:
|
|
727
|
+
conditions.append(f"{field} NOT ILIKE '{pattern}'")
|
|
728
|
+
else:
|
|
729
|
+
conditions.append(f"{field} ILIKE '{pattern}'")
|
|
730
|
+
else:
|
|
731
|
+
if negative:
|
|
732
|
+
conditions.append(f"{field} != '{value}'")
|
|
733
|
+
else:
|
|
734
|
+
conditions.append(f"{field} = '{value}'")
|
|
735
|
+
if negative:
|
|
736
|
+
join_operator = ' AND '
|
|
737
|
+
else:
|
|
738
|
+
join_operator = ' AND ' if operation == 'AND' else ' OR '
|
|
739
|
+
statement = statement.where(text('(' + join_operator.join(conditions) + ')'))
|
|
740
|
+
else:
|
|
741
|
+
conditions = []
|
|
742
|
+
for value in values:
|
|
743
|
+
if value_type == 'datetime':
|
|
744
|
+
|
|
745
|
+
if field_data.get('is_range', False) and len(
|
|
746
|
+
field_data.get('range_values', [])) == 2:
|
|
747
|
+
range_values = field_data['range_values']
|
|
748
|
+
val1, val2 = range_values
|
|
749
|
+
if is_date_time_value(val1)[0] and is_date_time_value(val2)[0]:
|
|
750
|
+
fmt1 = format_strings.get(val1)
|
|
751
|
+
fmt2 = format_strings.get(val2)
|
|
752
|
+
if fmt1 and fmt2:
|
|
753
|
+
if fmt1 == fmt2:
|
|
754
|
+
if val1 > val2:
|
|
755
|
+
val1, val2 = val2, val1
|
|
756
|
+
else:
|
|
757
|
+
try:
|
|
758
|
+
from datetime import datetime
|
|
759
|
+
dt1 = datetime.strptime(val1,
|
|
760
|
+
fmt1.replace('YYYY', '%Y').replace('MM',
|
|
761
|
+
'%m').replace(
|
|
762
|
+
'DD', '%d').replace('"T"HH24',
|
|
763
|
+
'T%H').replace(
|
|
764
|
+
'MI', '%M').replace('SS',
|
|
765
|
+
'%S').replace(
|
|
766
|
+
'US', '%f'))
|
|
767
|
+
dt2 = datetime.strptime(val2,
|
|
768
|
+
fmt2.replace('YYYY', '%Y').replace('MM',
|
|
769
|
+
'%m').replace(
|
|
770
|
+
'DD', '%d').replace('"T"HH24',
|
|
771
|
+
'T%H').replace(
|
|
772
|
+
'MI', '%M').replace('SS',
|
|
773
|
+
'%S').replace(
|
|
774
|
+
'US', '%f'))
|
|
775
|
+
if dt1 > dt2:
|
|
776
|
+
val1, val2 = val2, val1
|
|
777
|
+
except Exception:
|
|
778
|
+
if val1 > val2:
|
|
779
|
+
val1, val2 = val2, val1
|
|
780
|
+
else:
|
|
781
|
+
if val1 > val2:
|
|
782
|
+
val1, val2 = val2, val1
|
|
783
|
+
|
|
784
|
+
start_value, end_value = val1, val2
|
|
785
|
+
start_format = format_strings.get(start_value)
|
|
786
|
+
end_format = format_strings.get(end_value)
|
|
787
|
+
|
|
788
|
+
if start_format and end_format:
|
|
789
|
+
if negative:
|
|
790
|
+
conditions.append(
|
|
791
|
+
f"(payload::jsonb->'{field}'::timestamp NOT BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}')::timestamp AND TO_TIMESTAMP('{end_value}', '{end_format}')::timestamp)")
|
|
792
|
+
else:
|
|
793
|
+
conditions.append(
|
|
794
|
+
f"(payload::jsonb->'{field}'::timestamp BETWEEN TO_TIMESTAMP('{start_value}', '{start_format}')::timestamp AND TO_TIMESTAMP('{end_value}', '{end_format}')::timestamp)")
|
|
795
|
+
else:
|
|
796
|
+
format_string = format_strings.get(value)
|
|
797
|
+
if format_string:
|
|
798
|
+
next_value = get_next_date_value(value, format_string)
|
|
799
|
+
|
|
800
|
+
if negative:
|
|
801
|
+
conditions.append(
|
|
802
|
+
f"(payload::jsonb->'{field}'::timestamp < TO_TIMESTAMP('{value}', '{format_string}')::timestamp OR payload::jsonb->'{field}'::timestamp >= TO_TIMESTAMP('{next_value}', '{format_string}')::timestamp)")
|
|
803
|
+
else:
|
|
804
|
+
conditions.append(
|
|
805
|
+
f"(payload::jsonb->'{field}'::timestamp >= TO_TIMESTAMP('{value}', '{format_string}')::timestamp AND payload::jsonb->'{field}'::timestamp < TO_TIMESTAMP('{next_value}', '{format_string}')::timestamp)")
|
|
806
|
+
elif value_type == 'numeric':
|
|
807
|
+
if field_data.get('is_range', False) and len(
|
|
808
|
+
field_data.get('range_values', [])) == 2:
|
|
809
|
+
range_values = field_data['range_values']
|
|
810
|
+
val1, val2 = range_values
|
|
811
|
+
try:
|
|
812
|
+
num1 = float(val1)
|
|
813
|
+
num2 = float(val2)
|
|
814
|
+
if num1 > num2:
|
|
815
|
+
val1, val2 = val2, val1
|
|
816
|
+
except ValueError:
|
|
817
|
+
pass
|
|
818
|
+
|
|
819
|
+
if negative:
|
|
820
|
+
conditions.append(
|
|
821
|
+
f"(jsonb_typeof(payload::jsonb->'{field}') = 'number' AND (payload::jsonb->'{field}')::float NOT BETWEEN {val1} AND {val2})")
|
|
822
|
+
else:
|
|
823
|
+
conditions.append(
|
|
824
|
+
f"(jsonb_typeof(payload::jsonb->'{field}') = 'number' AND (payload::jsonb->'{field}')::float BETWEEN {val1} AND {val2})")
|
|
825
|
+
elif value_type == 'boolean':
|
|
826
|
+
bool_value = value.lower()
|
|
827
|
+
if negative:
|
|
828
|
+
conditions.append(
|
|
829
|
+
f"(jsonb_typeof(payload::jsonb->'{field}') = 'boolean' AND (payload::jsonb->'{field}')::boolean != {bool_value})")
|
|
830
|
+
else:
|
|
831
|
+
conditions.append(
|
|
832
|
+
f"(jsonb_typeof(payload::jsonb->'{field}') = 'boolean' AND (payload::jsonb->'{field}')::boolean = {bool_value})")
|
|
833
|
+
else:
|
|
834
|
+
if '*' in value:
|
|
835
|
+
pattern = value.replace('*', '%')
|
|
836
|
+
if negative:
|
|
837
|
+
conditions.append(f"(payload::jsonb->>'{field}') NOT ILIKE '{pattern}'")
|
|
838
|
+
else:
|
|
839
|
+
conditions.append(f"(payload::jsonb->>'{field}') ILIKE '{pattern}'")
|
|
840
|
+
else:
|
|
841
|
+
if negative:
|
|
842
|
+
conditions.append(f"payload::jsonb->'{field}' != '\"{value}\"'::jsonb")
|
|
843
|
+
else:
|
|
844
|
+
conditions.append(f"payload::jsonb->'{field}' = '\"{value}\"'::jsonb")
|
|
845
|
+
|
|
846
|
+
if conditions:
|
|
847
|
+
if negative:
|
|
848
|
+
join_operator = " OR " if operation == 'AND' else " AND "
|
|
849
|
+
else:
|
|
850
|
+
join_operator = " AND " if operation == 'AND' else " OR "
|
|
851
|
+
statement = statement.where(text("(" + join_operator.join(conditions) + ")"))
|
|
852
|
+
except Exception as e:
|
|
853
|
+
print(f"Error handling field {field}: {e}")
|
|
854
|
+
|
|
855
|
+
if query.filter_schema_names:
|
|
856
|
+
if 'meta' in query.filter_schema_names:
|
|
857
|
+
query.filter_schema_names.remove('meta')
|
|
858
|
+
if query.filter_schema_names:
|
|
859
|
+
statement = statement.where(
|
|
860
|
+
text("(payload ->> 'schema_shortname') IN ({})".format(
|
|
861
|
+
', '.join(f"'{item}'" for item in query.filter_schema_names)
|
|
862
|
+
))
|
|
863
|
+
)
|
|
864
|
+
if query.filter_shortnames:
|
|
865
|
+
statement = statement.where(
|
|
866
|
+
col(table.shortname).in_(query.filter_shortnames)
|
|
867
|
+
)
|
|
868
|
+
if query.filter_types:
|
|
869
|
+
statement = statement.where(
|
|
870
|
+
col(table.resource_type).in_(query.filter_types)
|
|
871
|
+
)
|
|
872
|
+
if query.filter_tags:
|
|
873
|
+
statement = statement.where(
|
|
874
|
+
col(table.tags).in_(query.filter_tags)
|
|
875
|
+
)
|
|
876
|
+
if query.from_date:
|
|
877
|
+
statement = statement.where(table.created_at >= query.from_date)
|
|
878
|
+
if query.to_date:
|
|
879
|
+
statement = statement.where(table.created_at <= query.to_date)
|
|
880
|
+
|
|
881
|
+
try:
|
|
882
|
+
if not is_for_count:
|
|
883
|
+
if query.sort_by:
|
|
884
|
+
if query.sort_by.startswith('attributes.'):
|
|
885
|
+
query.sort_by = query.sort_by[11:]
|
|
886
|
+
if "." in query.sort_by:
|
|
887
|
+
# Normalize JSON path for sorting as well (handle leading '@' and body.* shortcut)
|
|
888
|
+
sort_expression = transform_keys_to_sql(
|
|
889
|
+
query.sort_by.replace("@", "", 1) if query.sort_by.startswith("@") else (
|
|
890
|
+
f"payload.{query.sort_by}" if query.sort_by.startswith("body.") else query.sort_by))
|
|
891
|
+
sort_type = " DESC" if query.sort_type == SortType.descending else ""
|
|
892
|
+
sort_expression = f"CASE WHEN ({sort_expression}) ~ '^[0-9]+$' THEN ({sort_expression})::float END {sort_type}, ({sort_expression}) {sort_type}"
|
|
893
|
+
statement = statement.order_by(text(sort_expression))
|
|
894
|
+
else:
|
|
895
|
+
if query.sort_type == SortType.ascending:
|
|
896
|
+
statement = statement.order_by(getattr(table, query.sort_by))
|
|
897
|
+
if query.sort_type == SortType.descending:
|
|
898
|
+
statement = statement.order_by(getattr(table, query.sort_by).desc())
|
|
899
|
+
|
|
900
|
+
except Exception as e:
|
|
901
|
+
print("[!set_sql_statement_from_query]", e)
|
|
902
|
+
|
|
903
|
+
if not is_for_count:
|
|
904
|
+
if query.offset:
|
|
905
|
+
statement = statement.offset(query.offset)
|
|
906
|
+
|
|
907
|
+
statement = statement.limit(query.limit)
|
|
908
|
+
|
|
909
|
+
if query.type == QueryType.tags and not is_for_count and hasattr(table, 'tags'):
|
|
910
|
+
if query.retrieve_json_payload:
|
|
911
|
+
statement = select(
|
|
912
|
+
func.jsonb_array_elements_text(col(table.tags)).label('tag'),
|
|
913
|
+
func.count('*').label('count')
|
|
914
|
+
).where(col(table.uuid).in_(
|
|
915
|
+
select(col(table.uuid)).where(statement.whereclause) # type: ignore
|
|
916
|
+
)).group_by('tag')
|
|
917
|
+
else:
|
|
918
|
+
statement = select(
|
|
919
|
+
func.jsonb_array_elements_text(col(table.tags)).label('tag')
|
|
920
|
+
).where(col(table.uuid).in_(
|
|
921
|
+
select(col(table.uuid)).where(statement.whereclause) # type: ignore
|
|
922
|
+
)).distinct()
|
|
923
|
+
|
|
924
|
+
return statement
|
|
925
|
+
|
|
926
|
+
|
|
927
|
+
class SQLAdapter(BaseDataAdapter):
    # Engine and session factory are cached at class level so that every
    # SQLAdapter instance shares a single connection pool (built lazily
    # in __init__).
    _engine = None
    _async_session_factory = None
    # NOTE(review): `session` is declared but never assigned in the code
    # visible here -- confirm it is set elsewhere or remove it.
    session: Session
    # Per-instance aliases of the class-level singletons above.
    async_session: sessionmaker
    engine: Any
|
|
933
|
+
|
|
934
|
+
def locators_query(self, query: api.Query) -> tuple[int, list[core.Locator]]:
    """Resolve a query into ``(total, locators)``.

    Currently a stub: the subpath query type is recognized but produces
    no locators yet, and every other type is ignored.
    """
    found: list[core.Locator] = []
    count: int = 0
    if query.type == api.QueryType.subpath:
        # !TODO finish: build locators for subpath queries
        pass
    return count, found
|
|
942
|
+
|
|
943
|
+
def folder_path(
        self,
        space_name: str,
        subpath: str,
        shortname: str,
) -> str:
    """Return the filesystem folder of an entry.

    SQL-backed storage has no folder layout, so this is always the
    empty string regardless of the arguments.
    """
    return ""
|
|
950
|
+
|
|
951
|
+
async def otp_created_since(self, key: str) -> int | None:
    """Return the age, in whole seconds, of the OTP stored under ``key``.

    Returns None when no OTP row exists for that key.
    """
    async with self.get_session() as session:
        row = (await session.execute(select(OTP).where(OTP.key == key))).scalar_one_or_none()

    if row is None:
        return None

    age = datetime.now() - row.timestamp
    return int(age.total_seconds())
|
|
960
|
+
|
|
961
|
+
async def save_otp(
        self,
        key: str,
        otp: str,
):
    """Persist a one-time password under ``key``.

    If a row for ``key`` already exists (detected through a
    unique-constraint violation), the stale row is deleted and replaced
    with a fresh one; any other failure is rolled back and re-raised.
    """
    def _new_entry() -> OTP:
        # The entry is needed on both the happy path and the
        # replace-on-conflict path; build it in exactly one place
        # instead of duplicating the constructor call.
        return OTP(
            key=key,
            value={"otp": otp},
            timestamp=datetime.now()
        )

    try:
        async with self.get_session() as session:
            session.add(_new_entry())
    except Exception as e:
        async with self.get_session() as session:
            # String-matching the driver error is fragile but is the
            # only portable signal available here for "key already used".
            if "UniqueViolationError" in str(e) or "unique constraint" in str(e).lower():
                await session.rollback()
                statement = delete(OTP).where(col(OTP.key) == key)
                await session.execute(statement)

                session.add(_new_entry())
            else:
                await session.rollback()
                raise e
|
|
990
|
+
|
|
991
|
+
async def get_otp(
        self,
        key: str,
):
    """Fetch the OTP stored under ``key``.

    Entries older than ``settings.otp_token_ttl`` seconds are deleted
    and treated as missing. Returns the OTP string, or None when the
    key is absent or expired.
    """
    async with self.get_session() as session:
        entry = (await session.execute(select(OTP).where(OTP.key == key))).scalar_one_or_none()

        if entry is None:
            return None

        age_seconds = (datetime.now() - entry.timestamp).total_seconds()
        if age_seconds > settings.otp_token_ttl:
            # Lazy expiry: drop the stale row on read.
            await session.delete(entry)
            return None
        return entry.value.get("otp")
|
|
1005
|
+
|
|
1006
|
+
async def delete_otp(self, key: str):
    """Remove any OTP row stored under ``key`` (no-op when absent)."""
    async with self.get_session() as session:
        await session.execute(delete(OTP).where(col(OTP.key) == key))
|
|
1010
|
+
|
|
1011
|
+
def metapath(self,
             space_name: str,
             subpath: str,
             shortname: str,
             class_type: Type[MetaChild],
             schema_shortname: str | None = None,
             ) -> tuple[Path, str]:
    """Return the (directory, filename) pair of an entry's meta file.

    Not applicable to SQL-backed storage, so an empty path and an empty
    name are always returned.
    """
    empty_dir = Path()
    return empty_dir, ""
|
|
1019
|
+
|
|
1020
|
+
def __init__(self):
    """Lazily build the shared async engine and session factory.

    Both objects are cached on the class so that every adapter instance
    reuses the same connection pool; a failure to build the session
    factory is fatal and exits the process with code 127.
    """
    if SQLAdapter._engine is None:
        # SQLite only needs a driver and a database path; networked
        # databases additionally need host/port/credentials.
        url_kwargs: dict = {
            "drivername": settings.database_driver,
            "database": settings.database_name,
        }
        if "sqlite" not in settings.database_driver:
            url_kwargs.update(
                host=settings.database_host,
                port=settings.database_port,
                username=settings.database_username,
                password=settings.database_password,
            )
        url = URL.create(**url_kwargs)

        SQLAdapter._engine = create_async_engine(
            url,
            echo=False,
            pool_pre_ping=True,
            pool_size=settings.database_pool_size,
            max_overflow=settings.database_max_overflow,
            pool_timeout=settings.database_pool_timeout,
            pool_recycle=settings.database_pool_recycle,
        )
    self.engine = SQLAdapter._engine
    try:
        if SQLAdapter._async_session_factory is None:
            SQLAdapter._async_session_factory = sessionmaker(
                self.engine, class_=AsyncSession, expire_on_commit=False
            )  # type: ignore
        self.async_session = SQLAdapter._async_session_factory
    except Exception as e:
        print("[!FATAL]", e)
        sys.exit(127)
|
|
1056
|
+
|
|
1057
|
+
async def test_connection(self):
    """Probe database connectivity with a trivial ``SELECT 1``.

    Exits the process with code 127 when the database is unreachable.
    """
    try:
        async with self.get_session() as session:
            probe = await session.execute(text("SELECT 1"))
            probe.one_or_none()
    except Exception as e:
        print("[!FATAL]", e)
        sys.exit(127)
|
|
1064
|
+
|
|
1065
|
+
@asynccontextmanager
async def get_session(self):
    """Yield a fresh AsyncSession.

    Commits on clean exit of the ``async with`` body and always closes
    the session afterwards, even when the body raises.
    """
    session = self.async_session()
    try:
        yield session
        await session.commit()
    finally:
        await session.close()  # type: ignore
|
|
1073
|
+
|
|
1074
|
+
def get_table(
|
|
1075
|
+
self, class_type: Type[MetaChild]
|
|
1076
|
+
) -> Type[Roles] | Type[Permissions] | Type[Users] | Type[Spaces] | Type[Locks] | Type[Attachments] | Type[Entries]:
|
|
1077
|
+
|
|
1078
|
+
match class_type:
|
|
1079
|
+
case core.Role:
|
|
1080
|
+
return Roles
|
|
1081
|
+
case core.Permission:
|
|
1082
|
+
return Permissions
|
|
1083
|
+
case core.User:
|
|
1084
|
+
return Users
|
|
1085
|
+
case core.Space:
|
|
1086
|
+
return Spaces
|
|
1087
|
+
case core.Lock:
|
|
1088
|
+
return Locks
|
|
1089
|
+
case (
|
|
1090
|
+
core.Alteration
|
|
1091
|
+
| core.Media
|
|
1092
|
+
| core.Lock
|
|
1093
|
+
| core.Comment
|
|
1094
|
+
| core.Reply
|
|
1095
|
+
| core.Reaction
|
|
1096
|
+
| core.Json
|
|
1097
|
+
| core.DataAsset
|
|
1098
|
+
):
|
|
1099
|
+
return Attachments
|
|
1100
|
+
return Entries
|
|
1101
|
+
|
|
1102
|
+
def get_base_model(self, class_type: Type[MetaChild], data,
                   update=None) -> Roles | Permissions | Users | Spaces | Locks | Attachments | Entries:
    """Validate ``data`` into the SQL model corresponding to ``class_type``.

    Attachment-like core types are validated as Attachments (with the
    ``media`` key guaranteed to exist); anything not listed explicitly
    falls back to Entries.
    """
    if class_type == core.User:
        return Users.model_validate(data, update=update)
    elif class_type == core.Role:
        return Roles.model_validate(data, update=update)
    elif class_type == core.Permission:
        return Permissions.model_validate(data, update=update)
    elif class_type == core.Space:
        return Spaces.model_validate(data, update=update)
    elif class_type in (
        core.Alteration,
        core.Media,
        core.Lock,
        core.Comment,
        core.Reply,
        core.Reaction,
        core.Json,
        core.DataAsset,
    ):
        # NOTE(review): core.Lock is validated as an Attachment here but
        # get_table maps it to the Locks table -- confirm this asymmetry
        # is intentional.
        if data.get("media", None) is None:
            # Ensure the key exists even when the caller omitted it.
            data["media"] = None
        return Attachments.model_validate(data, update=update)
    return Entries.model_validate(data, update=update)
|
|
1127
|
+
|
|
1128
|
+
async def get_entry_attachments(
        self,
        subpath: str,
        attachments_path: Path,
        filter_types: list | None = None,
        include_fields: list | None = None,
        filter_shortnames: list | None = None,
        retrieve_json_payload: bool = False,
) -> dict:
    """Return the attachments of a single entry, grouped by resource type.

    ``attachments_path`` encodes the space name (first component) and
    the entry shortname (last component); ``subpath`` is the entry's
    subpath within the space.

    NOTE(review): filter_types, include_fields, filter_shortnames and
    retrieve_json_payload are accepted for interface compatibility but
    are currently ignored by this implementation -- confirm whether the
    filters should be applied here.
    """
    attachments_dict: dict[str, list] = {}
    async with self.get_session() as session:
        if not subpath.startswith("/"):
            subpath = f"/{subpath}"

        if str(settings.spaces_folder) in str(attachments_path):
            attachments_path = attachments_path.relative_to(settings.spaces_folder)
        space_name = attachments_path.parts[0]
        shortname = attachments_path.parts[-1]
        statement = (
            select(Attachments)
            .where(Attachments.space_name == space_name)
            .where(Attachments.subpath == f"{subpath}/{shortname}".replace('//', '/'))
        )
        # .scalars() yields ORM instances directly, avoiding the
        # 1-tuple rows that previously required `item = item[0]`
        # unpacking inside an enumerate() whose index was never used.
        items = (await session.execute(statement)).scalars().all()

        for item in items:
            attachment_record = Attachments.model_validate(item)
            attachment_json = attachment_record.model_dump()
            attachment = {
                "resource_type": attachment_json["resource_type"],
                "uuid": attachment_json["uuid"],
                "shortname": attachment_json["shortname"],
                # Drop the trailing shortname segment of the stored subpath.
                "subpath": "/".join(attachment_json["subpath"].split("/")[:-1]),
            }
            # Remove fields that are either promoted to the top level or
            # internal-only before exposing the remainder as attributes.
            for internal_key in (
                "resource_type", "uuid", "media", "shortname",
                "subpath", "relationships", "acl", "space_name",
            ):
                del attachment_json[internal_key]
            attachment["attributes"] = {**attachment_json}
            attachments_dict.setdefault(attachment_record.resource_type, []).append(attachment)

    return attachments_dict
|
|
1181
|
+
|
|
1182
|
+
def payload_path(
        self,
        space_name: str,
        subpath: str,
        class_type: Type[MetaChild],
        schema_shortname: str | None = None, ) -> Path:
    """Construct the full filesystem path of the meta file.

    Attachments live under ``<parent>/.dm/<name>/attachments.<type>``;
    everything else lives directly under ``<space>/<subpath>``.

    NOTE(review): the ``schema_shortname`` parameter is currently
    ignored (always overwritten with "") -- confirm whether
    schema-specific attachment folders are still needed.
    """
    path = settings.spaces_folder / space_name

    # The original `subpath = copy(subpath)` was a no-op on an
    # immutable str and has been removed.
    if subpath[0] == "/":
        subpath = f".{subpath}"
    if issubclass(class_type, core.Attachment):
        parent_subpath, parent_name = subpath.rsplit("/", 1)
        schema_shortname = ""
        attachment_folder = f"{parent_name}/attachments{schema_shortname}.{class_type.__name__.lower()}"
        path = path / parent_subpath / ".dm" / attachment_folder
    else:
        path = path / subpath
    return path
|
|
1205
|
+
|
|
1206
|
+
async def db_load_or_none(
        self,
        space_name: str,
        subpath: str,
        shortname: str,
        class_type: Type[MetaChild],
        user_shortname: str | None = None,
        schema_shortname: str | None = None,
) -> Attachments | Entries | Locks | Permissions | Roles | Spaces | Users | None:
    """Load the row backing the requested class type, or None on a
    miss or lookup error."""
    if not subpath.startswith("/"):
        subpath = f"/{subpath}"

    shortname = shortname.replace("/", "")

    table = self.get_table(class_type)

    if table is Attachments:
        # Skip eagerly loading the (potentially large) binary media column.
        statement = select(table).options(defer(Attachments.media))  # type: ignore
    else:
        statement = select(table)
    statement = (
        statement
        .where(col(table.space_name) == space_name)
        .where(table.shortname == shortname)
    )

    if table in [Entries, Attachments]:
        # Only these two tables additionally key rows by subpath.
        statement = statement.where(col(table.subpath) == subpath)

    try:
        async with self.get_session() as session:
            return (await session.execute(statement)).scalars().one_or_none()  # type: ignore
    except Exception as e:
        print("[!load_or_none]", e)
        logger.error(f"Failed parsing an entry. Error: {e}")
        return None
|
|
1239
|
+
|
|
1240
|
+
async def get_entry_by_criteria(self, criteria: dict, table: Any = None) -> core.Record | None:
    """Find the first entry matching ``criteria`` and return it as a Record.

    When ``table`` is None every candidate table is probed in turn and
    string criteria are matched by *prefix*; with an explicit ``table``
    string criteria are matched *exactly*. Returns None when nothing
    matches. The two branches previously duplicated the filtering and
    record-building logic; it is now shared via local helpers.
    """
    def _apply_criteria(statement, model, prefix_match: bool):
        # Bind every criterion value as a parameter (never interpolated)
        # and silently skip keys that are not columns of the model, to
        # avoid SQL injection through dynamic identifiers.
        for k, v in criteria.items():
            if not hasattr(model, k):
                # Unknown column name; skip
                continue
            column = getattr(model, k)
            if isinstance(v, str):
                if prefix_match:
                    statement = statement.where(cast(column, String).like(bindparam(k)))
                    statement = statement.params(**{k: f"{v}%"})
                else:
                    statement = statement.where(cast(column, String) == bindparam(k))
                    statement = statement.params(**{k: v})
            else:
                statement = statement.where(column == bindparam(k))
                statement = statement.params(**{k: v})
        return statement

    def _to_record(row) -> core.Record:
        # Re-validate through the matching core model, then expose the
        # originating space name as an extra attribute.
        core_model_class: core.Meta = getattr(sys.modules["models.core"],
                                              camel_case(row.resource_type))
        record = core_model_class.model_validate(
            row.model_dump()
        ).to_record(row.subpath, row.shortname)
        record.attributes = {**record.attributes, "space_name": row.space_name}
        return record

    async with self.get_session() as session:
        if table is None:
            # Discovery mode: probe each table until something matches.
            for candidate in [Entries, Users, Roles, Permissions, Spaces, Attachments]:
                statement = _apply_criteria(select(candidate), candidate, prefix_match=True)
                row = (await session.execute(statement)).scalars().first()
                if row is not None:
                    return _to_record(row)
            return None

        statement = _apply_criteria(select(table), table, prefix_match=False)
        row = (await session.execute(statement)).scalars().first()

        if row is None:
            return None

        return _to_record(row)
|
|
1304
|
+
|
|
1305
|
+
async def get_latest_history(
        self,
        space_name: str,
        subpath: str,
        shortname: str,
) -> Histories | None:
    """Return the newest history row for an entry, or None on a miss
    or on any lookup error (best-effort by design)."""
    async with self.get_session() as session:
        try:
            statement = (
                select(Histories)
                .where(col(Histories.space_name) == space_name)
                .where(col(Histories.subpath) == subpath)
                .where(col(Histories.shortname) == shortname)
                .order_by(Histories.timestamp.desc())  # type: ignore
                .limit(1)
            )
            return (await session.execute(statement)).scalars().first()  # type: ignore
        except Exception:
            return None
|
|
1322
|
+
|
|
1323
|
+
async def query(
        self, query: api.Query, user_shortname: str | None = None
) -> Tuple[int, list[core.Record]]:
    """Execute an api.Query and return ``(total, records)``.

    Handles several query types with distinct result shapes: events,
    tags, spaces listing, counters, (attachments) aggregation, and
    plain listings. Access control is enforced by intersecting the
    user's query policies with the requested space/subpath before any
    SQL is issued; an empty intersection short-circuits to (0, []).
    NOTE: this method mutates ``query`` in place (subpath normalization
    and search augmentation).
    """
    total: int
    results: list

    # Normalize the subpath to a single leading slash.
    if not query.subpath.startswith("/"):
        query.subpath = f"/{query.subpath}"
    if query.subpath == "//":
        query.subpath = "/"

    user_shortname = user_shortname if user_shortname else "anonymous"
    # History/event queries expose audit data; anonymous access is refused.
    if user_shortname == "anonymous" and query.type in [QueryType.history, QueryType.events]:
        raise api.Exception(
            status.HTTP_401_UNAUTHORIZED,
            api.Error(
                type="request",
                code=InternalErrorCode.NOT_ALLOWED,
                message="You don't have permission to this action",
            ),
        )
    # Policies for the exact subpath...
    user_query_policies = await get_user_query_policies(
        self, user_shortname, query.space_name, query.subpath, query.type == QueryType.spaces
    )
    # ...plus, unless an exact match was requested, for everything below it.
    if not query.exact_subpath:
        r = await get_user_query_policies(
            self, user_shortname, query.space_name, f'{query.subpath}/%'.replace('//', '/'),
            query.type == QueryType.spaces
        )
        user_query_policies.extend(r)

    # No applicable policy at all means no visibility -- return early.
    if len(user_query_policies) == 0:
        return 0, []

    if query.type in [QueryType.attachments, QueryType.attachments_aggregation]:
        table = Attachments
        # Defer the binary media column; listings never need its bytes.
        statement = select(table).options(defer(table.media))  # type: ignore
    else:
        table = set_table_for_query(query)
        statement = select(table)

    user_permissions = await self.get_user_permissions(user_shortname)
    filtered_policies = []

    # Policies are "<space>:<subpath>:<resource_type>..." strings; build the
    # prefix to filter by (root subpath keeps its leading '/').
    _subpath_target_permissions = '/' if query.subpath == '/' else query.subpath.removeprefix('/')
    if query.filter_types:
        # NOTE(review): each iteration overwrites filtered_policies, so only
        # the last filter type effectively contributes -- confirm intended.
        for ft in query.filter_types:
            target_permissions = f'{query.space_name}:{_subpath_target_permissions}:{ft}'
            filtered_policies = [policy for policy in user_query_policies if
                                 policy.startswith(target_permissions)]
    else:
        target_permissions = f'{query.space_name}:{_subpath_target_permissions}'
        filtered_policies = [policy for policy in user_query_policies if policy.startswith(target_permissions)]

    # Collect permission-mandated filter_fields_values constraints and the
    # space/subpath/resource_type of the permission keys that carry them.
    ffv_spaces, ffv_subpath, ffv_resource_type, ffv_query = [], [], [], []
    for user_query_policy in filtered_policies:
        for perm_key in user_permissions.keys():
            if user_query_policy.startswith(perm_key):
                if ffv := user_permissions[perm_key]['filter_fields_values']:
                    if ffv not in ffv_query:
                        ffv_query.append(ffv)
                        perm_key_splited = perm_key.split(':')
                        ffv_spaces.append(perm_key_splited[0])
                        ffv_subpath.append(perm_key_splited[1])
                        ffv_resource_type.append(perm_key_splited[2])

    # Fold mandated constraints into the search expression.
    if len(ffv_spaces):
        perm_key_splited_query = f'@space_name:{"|".join(ffv_spaces)} @subpath:/{"|/".join(ffv_subpath)} @resource_type:{"|".join(ffv_resource_type)} {" ".join(ffv_query)}'
        if query.search:
            query.search += f' {perm_key_splited_query}'
        else:
            query.search = perm_key_splited_query
    # De-duplicate search tokens while preserving their first-seen order.
    if query.search:
        parts = [p for p in query.search.split(' ') if p]
        seen = set()
        deduped_parts = []
        for p in parts:
            if p not in seen:
                seen.add(p)
                deduped_parts.append(p)
        query.search = ' '.join(deduped_parts)
    statement_total = select(func.count(col(table.uuid)))

    # Events are served by a dedicated path; failures degrade to empty.
    if query and query.type == QueryType.events:
        try:
            return await events_query(query, user_shortname)
        except Exception as e:
            print(e)
            return 0, []

    # Tag queries return a single synthetic record listing distinct tags
    # (with per-tag counts when the payload is requested).
    if query and query.type == QueryType.tags:
        try:
            statement = await set_sql_statement_from_query(table, statement, query, False)
            statement_total = await set_sql_statement_from_query(table, statement_total, query, True)
            async with self.get_session() as session:
                results = list((await session.execute(statement)).all())
                if len(results) == 0:
                    return 0, []

                tags = []
                tag_counts = {}
                if query.retrieve_json_payload:
                    # Rows are (tag, count) pairs.
                    for result in results:
                        if result and len(result) > 1 and result[0]:
                            tag = result[0]
                            count = result[1]
                            tags.append(tag)
                            tag_counts[tag] = count
                else:
                    # Rows carry just the tag value.
                    for result in results:
                        if result and len(result) > 0 and result[0]:
                            tags.append(result[0])
            async with self.get_session() as session:
                _total = (await session.execute(statement_total)).one()
                total = int(_total[0])

            attributes = {"tags": tags}
            if query.retrieve_json_payload and tag_counts:
                attributes["tag_counts"] = tag_counts  # type: ignore

            return total, [core.Record(
                resource_type=core.ResourceType.content,
                shortname="tags",
                subpath=query.subpath,
                attributes=attributes,
            )]
        except Exception as e:
            print("[!!query_tags]", e)
            return 0, []

    # Listing spaces from management's root bypasses the per-entry policy
    # machinery; access is instead checked per space below.
    is_fetching_spaces = False
    if (query.space_name
            and query.type == QueryType.spaces
            and query.space_name == "management"
            and query.subpath == "/"):
        is_fetching_spaces = True
        statement = select(Spaces)  # type: ignore
        statement_total = select(func.count(col(Spaces.uuid)))
    else:
        statement = await set_sql_statement_from_query(table, statement, query, False)
        statement_total = await set_sql_statement_from_query(table, statement_total, query, True)

        if query.type != QueryType.spaces:
            statement = apply_acl_and_query_policies(statement, table, user_shortname, user_query_policies)
            statement_total = apply_acl_and_query_policies(statement_total, table, user_shortname,
                                                           user_query_policies)

    try:
        # Grouped aggregations report their total as the sum of per-group
        # counts rather than a plain row count.
        if query.type == QueryType.aggregation and query.aggregation_data and bool(
                query.aggregation_data.group_by):
            statement_total = select(
                func.sum(statement_total.c["count"]).label('total_count')
            )

        async with self.get_session() as session:
            if query.retrieve_total:
                _total = (await session.execute(statement_total)).one()
                total = int(_total[0])
            else:
                # -1 signals "total not computed" to the caller.
                total = -1
            if query.type == QueryType.counters:
                return total, []

            if query.type == QueryType.attachments_aggregation:
                # For aggregation, we need tuples
                results = list((await session.execute(statement)).all())
                await session.close()
                attributes = {}
                for item in results:
                    attributes.update({item[0]: item[1]})
                return 1, [core.Record(
                    resource_type=ResourceType.content,
                    uuid=uuid4(),
                    shortname='aggregation_result',
                    subpath=query.subpath,
                    attributes=attributes
                )]
            elif query.type == QueryType.aggregation:
                results = list((await session.execute(statement)).all())
                await session.close()
            else:
                # Non-aggregation: fetch ORM instances directly
                results = (await session.execute(statement)).scalars().all()
                await session.close()

        if is_fetching_spaces:
            # Per-space access check replaces the skipped policy filter.
            from utils.access_control import access_control
            results = [result for result in results if await access_control.check_space_access(
                user_shortname if user_shortname else "anonymous", result.shortname
            )]
        if len(results) == 0:
            return 0, []

        results = await self._set_query_final_results(query, results)

        # Optional client-side joins are best-effort: a failure keeps the
        # un-joined results rather than failing the whole query.
        if getattr(query, 'join', None):
            try:
                results = await self._apply_client_joins(results, query.join, user_shortname or "anonymous")  # type: ignore
            except Exception as e:
                print("[!client_join]", e)

    except Exception as e:
        print("[!!query]", e)
        raise api.Exception(
            status_code=status.HTTP_400_BAD_REQUEST,
            error=api.Error(
                type="query",
                code=InternalErrorCode.SOMETHING_WRONG,
                message=str(e),
            ),
        )
    return total, results
|
|
1535
|
+
|
|
1536
|
+
async def _apply_client_joins(self, base_records: list[core.Record], joins: list[api.JoinQuery], user_shortname: str) -> list[core.Record]:
    """Attach joined records to each base record under ``attributes['join'][alias]``.

    Implements a client-side hash join: for every join spec, a sub-query is run
    (filtered by the base records' join-key values), the results are indexed by
    the first join key, and candidates are further filtered by any remaining
    join keys. Mutates and returns *base_records*.
    """
    def parse_join_on(expr: str) -> list[tuple[str, bool, str, bool]]:
        # Parse "left:right, left2:right2" pairs; a trailing "[]" on either side
        # marks that path as array-valued. Returns (left, l_is_arr, right, r_is_arr).
        joins_list = []
        for part in expr.split(','):
            part = part.strip()
            if not part:
                continue
            parts = [p.strip() for p in part.split(':', 1)]
            if len(parts) != 2:
                raise ValueError(f"Invalid join_on expression: {expr}")
            left, right = parts[0], parts[1]
            _l_arr = left.endswith('[]')
            _r_arr = right.endswith('[]')
            if _l_arr:
                left = left[:-2]
            if _r_arr:
                right = right[:-2]
            joins_list.append((left, _l_arr, right, _r_arr))
        return joins_list

    def get_values_from_record(rec: core.Record, path: str, array_hint: bool) -> list:
        # Extract the value(s) at *path*: top-level record fields are read
        # directly, everything else is resolved inside rec.attributes.
        if path in ("shortname", "resource_type", "subpath", "uuid"):
            val = getattr(rec, path, None)
        elif path == "space_name":
            val = rec.attributes.get("space_name") if rec.attributes else None
        else:
            container = rec.attributes or {}
            val = get_nested_value(container, path)

        if val is None:
            return []
        if isinstance(val, list):
            # Keep only scalar items; nested containers are not joinable keys.
            out = []
            for item in val:
                if isinstance(item, (str, int, float, bool)) or item is None:
                    out.append(item)
            return out

        # NOTE(review): both branches below are identical — *array_hint*
        # currently has no effect on scalar values. Confirm intent.
        if array_hint:
            return [val]
        return [val]

    # Ensure every base record has an attributes dict with a 'join' bucket.
    for rec in base_records:
        if rec.attributes is None:
            rec.attributes = {}
        if rec.attributes.get('join') is None:
            rec.attributes['join'] = {}

    # Local import — presumably to avoid a circular import at module load time.
    import models.api as api
    for join_item in joins:
        join_on = getattr(join_item, 'join_on', None)
        alias = getattr(join_item, 'alias', None)
        q = getattr(join_item, 'query', None)
        if not join_on or not alias or q is None:
            # Incomplete join spec: skip silently.
            continue

        parsed_joins = parse_join_on(join_on)
        if not parsed_joins:
            continue

        sub_query = q if isinstance(q, api.Query) else api.Query.model_validate(q)
        # Remember the caller-supplied limit (applied per-base-record later),
        # then run the sub-query with the system-wide max so matching is complete.
        q_raw = q if isinstance(q, dict) else q.model_dump(exclude_defaults=True)
        user_limit = q_raw.get('limit') or q_raw.get('limit_')
        sub_query.limit = settings.max_query_limit
        sub_query = copy(sub_query)

        search_terms = []
        possible_match = True

        # Build one search term per join key from the union of left-side values.
        for l_path, l_arr, r_path, r_arr in parsed_joins:
            left_values = set()
            for br in base_records:
                l_vals = get_values_from_record(br, l_path, l_arr)
                for v in l_vals:
                    if v is not None:
                        left_values.add(str(v))

            if not left_values:
                # No base record carries this key — the join can't match anything.
                possible_match = False
                break

            search_val = "|".join(left_values)
            search_terms.append(f"@{r_path}:{search_val}")

        if not possible_match:
            right_records: list[core.Record] = []
        else:
            # Merge the generated terms into any user-provided search string.
            search_term = " ".join(search_terms)
            if sub_query.search:
                sub_query.search = f"{sub_query.search} {search_term}"
            else:
                sub_query.search = search_term

            _total, right_records = await self.query(sub_query, user_shortname)

        # Index the right side by the FIRST join key; remaining keys are
        # verified per-candidate below.
        first_join = parsed_joins[0]
        l_path_0, l_arr_0, r_path_0, r_arr_0 = first_join

        right_index: dict[str, list[core.Record]] = {}
        for rr in right_records:
            r_vals = get_values_from_record(rr, r_path_0, r_arr_0)
            for v in r_vals:
                if v is None:
                    continue
                key = str(v)
                right_index.setdefault(key, []).append(rr)

        for br in base_records:
            l_vals = get_values_from_record(br, l_path_0, l_arr_0)
            candidates: list[core.Record] = []
            for v in l_vals:
                if v is None:
                    continue
                key = str(v)
                if key in right_index:
                    candidates.extend(right_index[key])

            # De-duplicate candidates by (subpath, shortname, resource_type).
            seen = set()
            unique_candidates = []
            for c in candidates:
                uid = f"{c.subpath}:{c.shortname}:{c.resource_type}"
                if uid in seen:
                    continue
                seen.add(uid)
                unique_candidates.append(c)

            # A candidate matches only if EVERY remaining join key intersects.
            matched = []
            for cand in unique_candidates:
                all_match = True
                for i in range(1, len(parsed_joins)):
                    l_p, l_a, r_p, r_a = parsed_joins[i]
                    l_vs = set(str(x) for x in get_values_from_record(br, l_p, l_a) if x is not None)
                    r_vs = set(str(x) for x in get_values_from_record(cand, r_p, r_a) if x is not None)

                    if not l_vs.intersection(r_vs):
                        all_match = False
                        break

                if all_match:
                    matched.append(cand)

            # Apply the caller's original limit per base record, not globally.
            if user_limit:
                matched = matched[:user_limit]

            br.attributes['join'][alias] = matched

    return base_records
async def load_or_none(
    self,
    space_name: str,
    subpath: str,
    shortname: str,
    class_type: Type[MetaChild],
    user_shortname: str | None = None,
    schema_shortname: str | None = None,
) -> MetaChild | None:
    """Fetch a single entry as *class_type*, or None when it does not exist.

    A raw dict payload on the row is normalized (body defaulted to {}) and
    coerced into ``core.Payload`` before the final model validation.
    """
    row = await self.db_load_or_none(
        space_name, subpath, shortname, class_type, user_shortname, schema_shortname
    )
    if not row:
        return None

    try:
        payload = getattr(row, 'payload', None)
        if payload and isinstance(payload, dict):
            if payload.get("body", None) is None:
                payload["body"] = {}
            row.payload = core.Payload.model_validate(payload, strict=False)
    except Exception as e:
        # Best-effort: a malformed payload is logged but does not abort the load.
        print("[!load]", e)
        logger.error(f"Failed parsing an entry. Error: {e}")
    return class_type.model_validate(row.model_dump())
async def load(
    self,
    space_name: str,
    subpath: str,
    shortname: str,
    class_type: Type[MetaChild],
    user_shortname: str | None = None,
    schema_shortname: str | None = None,
) -> MetaChild:
    """Load a single entry, raising a 404 ``api.Exception`` when it is absent."""
    entry: MetaChild | None = await self.load_or_none(
        space_name, subpath, shortname, class_type, user_shortname, schema_shortname
    )
    if entry is not None:
        return entry

    raise api.Exception(
        status_code=status.HTTP_404_NOT_FOUND,
        error=api.Error(
            type="db",
            code=InternalErrorCode.OBJECT_NOT_FOUND,
            message=f"Request object is not available @{space_name}/{subpath}/{shortname} {class_type=} {schema_shortname=}",
        ),
    )
async def load_resource_payload(
    self,
    space_name: str,
    subpath: str,
    filename: str,
    class_type: Type[MetaChild],
    schema_shortname: str | None = None,
) -> dict[str, Any] | None:
    """Load a Meta class payload file.

    Returns the ``payload.body`` dict of the matching row, or None when no
    row matches. *filename* may carry a ``.json`` suffix, which is stripped.
    """
    shortname = filename.replace('.json', '')
    if not subpath.startswith("/"):
        subpath = f"/{subpath}"

    async with self.get_session() as session:
        model = self.get_table(class_type)
        stmt = select(model).where(model.space_name == space_name)

        # Flat tables are keyed by shortname alone; hierarchical tables are
        # additionally scoped by subpath.
        if model in [Roles, Permissions, Users]:
            stmt = stmt.where(model.shortname == shortname)
        elif model in [Entries, Attachments, Histories]:
            stmt = stmt.where(model.subpath == subpath).where(
                model.shortname == shortname
            )

        row = (await session.execute(stmt)).one_or_none()
        if row is None:
            return None
        body: dict = row[0].model_dump().get("payload", {}).get("body", {})
        return body
async def _validate_referential_integrity(self, meta: core.Meta):
    """Ensure every role/group/permission referenced by *meta* actually exists.

    Users are checked against their roles and groups, roles against their
    permissions, and groups against their roles (all looked up in the
    management space).

    Raises:
        api.Exception: 400 with ``SHORTNAME_DOES_NOT_EXIST`` naming the
            missing reference.
    """

    async def _require(subpath: str, ref_shortname: str, cls, kind: str) -> None:
        # The original code repeated this raise block three times verbatim;
        # it is factored out here — messages and codes are unchanged.
        if not await self.load_or_none(settings.management_space, subpath, ref_shortname, cls):
            raise api.Exception(
                status_code=status.HTTP_400_BAD_REQUEST,
                error=api.Error(
                    type="validation",
                    code=InternalErrorCode.SHORTNAME_DOES_NOT_EXIST,
                    message=f"{kind} '{ref_shortname}' does not exist",
                ),
            )

    if isinstance(meta, core.User):
        if meta.roles:
            for role in meta.roles:
                await _require('roles', role, core.Role, "Role")
        if meta.groups:
            for group in meta.groups:
                await _require('groups', group, core.Group, "Group")
    elif isinstance(meta, core.Role):
        if meta.permissions:
            for permission in meta.permissions:
                await _require('permissions', permission, core.Permission, "Permission")
    elif isinstance(meta, core.Group):
        if hasattr(meta, 'roles') and meta.roles:
            for role in meta.roles:
                await _require('roles', role, core.Role, "Role")
async def _check_in_use(self, meta: core.Meta):
    """Raise 400 ``CANNT_DELETE`` when *meta* is still referenced elsewhere.

    Roles and groups may be referenced from users; permissions from roles.
    Meta types outside these three pass through without any check.
    """
    # The original elif chain repeated the same query/raise three times;
    # collapsed into a (meta class, id column, array column, label, referrers)
    # table. Error messages and codes are unchanged.
    checks = (
        (core.Role, Users.shortname, Users.roles, "Role", "users"),
        (core.Group, Users.shortname, Users.groups, "Group", "users"),
        (core.Permission, Roles.shortname, Roles.permissions, "Permission", "roles"),
    )
    async with self.get_session() as session:
        for meta_cls, id_col, array_col, label, referrers in checks:
            if not isinstance(meta, meta_cls):
                continue
            statement = select(id_col).where(col(array_col).contains([meta.shortname]))
            result = await session.execute(statement)
            if result.first():
                raise api.Exception(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    error=api.Error(
                        type="delete",
                        code=InternalErrorCode.CANNT_DELETE,
                        message=f"{label} '{meta.shortname}' is in use by one or more {referrers}",
                    ),
                )
            # Mirror the original elif semantics: only the first matching
            # meta type is checked.
            break
async def save(
    self, space_name: str, subpath: str, meta: core.Meta
) -> Any:
    """Persist *meta* as a new row in its table for (space_name, subpath).

    Validates referential integrity first, normalizes the subpath, generates
    query policies for non-attachment/non-history rows, then commits. Returns
    the refreshed ORM row. Any failure is re-raised as a 500 ``api.Exception``.
    """
    await self._validate_referential_integrity(meta)
    try:
        async with self.get_session() as session:
            entity = {
                **meta.model_dump(),
                "space_name": space_name,
                "subpath": subpath,
            }

            # Folder subpaths are normalized to "/segment" form (leading slash,
            # no trailing slash), except for the root folder "/".
            if meta.__class__ is core.Folder:
                if entity["subpath"] != "/":
                    if not entity["subpath"].startswith("/"):
                        entity["subpath"] = f'/{entity["subpath"]}'
                    if entity["subpath"].endswith("/"):
                        entity["subpath"] = entity["subpath"][:-1]

            # General subpath normalization + project-level validation.
            if "subpath" in entity:
                if entity["subpath"] != "/" and entity["subpath"].endswith("/"):
                    entity["subpath"] = entity["subpath"][:-1]
                entity["subpath"] = subpath_checker(entity["subpath"])

            entity['resource_type'] = meta.__class__.__name__.lower()
            data = self.get_base_model(meta.__class__, entity)

            # Attachments and history rows are not directly queryable, so they
            # carry no query policies.
            if not isinstance(data, Attachments) and not isinstance(data, Histories):
                data.query_policies = generate_query_policies(
                    space_name=space_name,
                    subpath=subpath,
                    resource_type=entity['resource_type'],
                    is_active=entity['is_active'],
                    owner_shortname=entity.get('owner_shortname', 'dmart'),
                    owner_group_shortname=entity.get('owner_group_shortname', None),
                )
            session.add(data)
            try:
                await session.commit()
                await session.refresh(data)
            except Exception as e:
                # Roll back the failed transaction; the outer handler converts
                # the error into an api.Exception.
                await session.rollback()
                raise e
            # Refresh authz MVs only when Users/Roles/Permissions changed
            # try:
            #     if isinstance(data, (Users, Roles, Permissions)):
            #         await self.ensure_authz_materialized_views_fresh()
            # except Exception as _e:
            #     logger.warning(f"AuthZ MV refresh after save skipped: {_e}")
            return data

    except Exception as e:
        print("[!save]", e)
        logger.error(f"Failed saving an entry. Error: {e}")
        raise api.Exception(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            error=api.Error(
                type="db",
                code=InternalErrorCode.SOMETHING_WRONG,
                message=f"Failed saving an entry. Error: {e}",
            ),
        )
async def create(self, space_name: str, subpath: str, meta: core.Meta):
    """Create *meta*, failing when an entry with the same shortname exists."""
    existing = await self.load_or_none(space_name, subpath, meta.shortname, meta.__class__)
    if existing is not None:
        raise api.Exception(
            status_code=status.HTTP_400_BAD_REQUEST,
            error=api.Error(
                type="create",
                code=InternalErrorCode.SHORTNAME_ALREADY_EXIST,
                message="already exists",
            ),
        )

    await self.save(space_name, subpath, meta)
async def save_payload(
    self, space_name: str, subpath: str, meta: core.Meta, attachment
):
    """Persist an uploaded attachment for *meta* via ``update``.

    Content entries get their JSON document merged into the payload body;
    every other meta type stores the raw bytes as attachment media.
    """
    if meta.__class__ == core.Content:
        # JSON payload: parse the uploaded file into the meta body.
        document = json.load(attachment.file)
        if meta.payload:
            meta.payload.body = document
        await self.update(
            space_name, subpath, meta,
            {}, {}, [],
            ""
        )
    else:
        # Binary media: forward the raw bytes to update().
        raw = await attachment.read()
        await self.update(
            space_name, subpath, meta,
            {}, {}, [],
            "", attachment_media=raw
        )
async def save_payload_from_json(
    self,
    space_name: str,
    subpath: str,
    meta: core.Meta,
    payload_data: dict[str, Any],
):
    """Merge *payload_data* into the entry's payload body and persist it.

    A dict body is shallow-merged (incoming keys win); any other body is
    replaced outright.

    Raises:
        api.Exception: 400 ``MISSING_METADATA`` when the entry does not exist,
            or 400 ``SOMETHING_WRONG`` on unexpected failures.
    """
    try:
        result = await self.db_load_or_none(space_name, subpath, meta.shortname, meta.__class__)
        if result is None:
            raise api.Exception(
                status_code=status.HTTP_400_BAD_REQUEST,
                error=api.Error(
                    type="create",
                    code=InternalErrorCode.MISSING_METADATA,
                    message="metadata is missing",
                ),
            )
        if meta.payload:
            if isinstance(meta.payload.body, dict):
                # Shallow merge: incoming keys override existing ones.
                meta.payload.body = {
                    **meta.payload.body,
                    **payload_data,
                }
            else:
                meta.payload.body = payload_data

        await self._validate_referential_integrity(meta)
        result.sqlmodel_update(meta.model_dump())
        async with self.get_session() as session:
            session.add(result)
    except api.Exception:
        # FIX: previously the broad handler below swallowed the specific
        # MISSING_METADATA / referential-integrity errors raised above and
        # replaced them with a generic "failed to update entry". Let them
        # propagate unchanged.
        raise
    except Exception as e:
        print("[!save_payload_from_json]", e)
        logger.error(f"Failed parsing an entry. Error: {e}")
        raise api.Exception(
            status_code=status.HTTP_400_BAD_REQUEST,
            error=api.Error(
                type="update",
                code=InternalErrorCode.SOMETHING_WRONG,
                message="failed to update entry",
            ),
        )
async def update(
    self,
    space_name: str,
    subpath: str,
    meta: core.Meta,
    old_version_flattend: dict,
    new_version_flattend: dict,
    updated_attributes_flattend: list,
    user_shortname: str,
    schema_shortname: str | None = None,
    retrieve_lock_status: bool | None = False,
    attachment_media: Any | None = None,
) -> dict:
    """Update the entry, store the difference and return it.

    Loads the existing row, applies *meta* onto it, regenerates query
    policies, stamps ``updated_at`` (except for locks), commits, and records
    the history diff via ``store_entry_diff``.

    Raises:
        api.Exception: 400 ``MISSING_METADATA`` when the entry is absent, or
            400 ``SOMETHING_WRONG`` on unexpected failures.
    """
    await self._validate_referential_integrity(meta)

    try:
        result = await self.db_load_or_none(space_name, subpath, meta.shortname, meta.__class__)
        if result is None:
            raise api.Exception(
                status_code=status.HTTP_400_BAD_REQUEST,
                error=api.Error(
                    type="create",
                    code=InternalErrorCode.MISSING_METADATA,
                    message="metadata is missing",
                ),
            )

        # Re-activating a user resets their failed-login counter.
        if isinstance(result, Users) and not result.is_active and meta.is_active:
            await self.set_failed_password_attempt_count(result.shortname, 0)

        result.sqlmodel_update(meta.model_dump())

        if hasattr(result, "subpath") and (not result.subpath.startswith("/")):
            result.subpath = f"/{result.subpath}"

        if isinstance(result, Attachments) and attachment_media:
            result.media = attachment_media
        if hasattr(result, 'query_policies'):
            result.query_policies = generate_query_policies(
                space_name=space_name,
                subpath=subpath,
                resource_type=result.resource_type,  # type: ignore
                is_active=result.is_active,  # type: ignore
                owner_shortname=result.owner_shortname,
                # NOTE(review): this passes owner_shortname as the GROUP
                # shortname — looks like it should be
                # result.owner_group_shortname; confirm before changing.
                owner_group_shortname=result.owner_shortname,
            )

        # Locks keep their original timestamp; everything else is stamped now,
        # and the flattened versions/diff-keys are kept in sync.
        if meta.__class__ is not core.Lock or not isinstance(result, Locks):
            result.updated_at = datetime.now()
            new_version_flattend['updated_at'] = result.updated_at.isoformat()  # type: ignore
            if "updated_at" not in updated_attributes_flattend:
                updated_attributes_flattend.append("updated_at")
            if 'updated_at' in old_version_flattend:
                old_version_flattend['updated_at'] = old_version_flattend['updated_at'].isoformat()

        async with self.get_session() as session:
            session.add(result)

        # try:
        #     if isinstance(result, (Users, Roles, Permissions)):
        #         await self.ensure_authz_materialized_views_fresh()
        # except Exception as _e:
        #     logger.warning(f"AuthZ MV refresh after update skipped: {_e}")
    except api.Exception:
        # FIX: previously the broad handler below swallowed the specific
        # MISSING_METADATA error raised above and replaced it with a generic
        # "failed to update entry". Let api.Exception propagate unchanged.
        raise
    except Exception as e:
        print("[!update]", e)
        logger.error(f"Failed parsing an entry. Error: {e}")
        raise api.Exception(
            status_code=status.HTTP_400_BAD_REQUEST,
            error=api.Error(
                type="update",
                code=InternalErrorCode.SOMETHING_WRONG,
                message="failed to update entry",
            ),
        )

    history_diff = await self.store_entry_diff(
        space_name,
        subpath,
        meta.shortname,
        user_shortname,
        old_version_flattend,
        new_version_flattend,
        updated_attributes_flattend,
        meta.__class__,
    )
    return history_diff
async def update_payload(
    self,
    space_name: str,
    subpath: str,
    meta: core.Meta,
    payload_data: dict[str, Any],
    owner_shortname: str,
):
    """Replace the entry's payload body with *payload_data* and persist it."""
    if not meta.payload:
        # Entry had no payload yet — start from an empty one.
        meta.payload = core.Payload()
    meta.payload.body = payload_data

    await self.update(
        space_name, subpath, meta, {}, {}, [], owner_shortname
    )
async def store_entry_diff(
    self,
    space_name: str,
    subpath: str,
    shortname: str,
    owner_shortname: str,
    old_version_flattend: dict,
    new_version_flattend: dict,
    updated_attributes_flattend: list,
    resource_type,
) -> dict:
    """Compute the old→new diff, record it as a history row, and return it.

    Also stamps ``last_checksum_history`` (sha1 of the new flattened version)
    on both the history row and the entry itself. Best-effort: any failure is
    logged and an empty dict is returned rather than raised.
    """
    try:
        # Union of keys from both versions; each differing key becomes a
        # {"old": ..., "new": ...} entry ("null" marks a side that lacked it).
        diff_keys = list(old_version_flattend.keys())
        diff_keys.extend(list(new_version_flattend.keys()))
        history_diff = {}
        for key in set(diff_keys):
            # if key in updated_attributes_flattend:
            old = copy(old_version_flattend.get(key, "null"))
            new = copy(new_version_flattend.get(key, "null"))

            if old != new:
                # For lists, keep only the elements that actually changed.
                if isinstance(old, list) and isinstance(new, list):
                    old, new = arr_remove_common(old, new)

                history_diff[key] = {"old": old, "new": new}
        # Keys present only in the old version are recorded as removed.
        removed = get_removed_items(list(old_version_flattend.keys()),
                                    list(new_version_flattend.keys()))
        for r in removed:
            history_diff[r] = {
                "old": old_version_flattend[r],
                "new": None,
            }
        if not history_diff:
            # Nothing changed: no history row, no checksum update.
            return {}

        # Deterministic checksum of the new state (sorted keys, str fallback).
        new_version_json = json.dumps(new_version_flattend, sort_keys=True, default=str)
        new_checksum = hashlib.sha1(new_version_json.encode()).hexdigest()

        history_obj = Histories(
            space_name=space_name,
            uuid=uuid4(),
            shortname=shortname,
            owner_shortname=owner_shortname or "__system__",
            timestamp=datetime.now(),
            request_headers=get_request_data().get("request_headers", {}),
            diff=history_diff,
            subpath=subpath,
            last_checksum_history=new_checksum,
        )

        async with self.get_session() as session:
            session.add(Histories.model_validate(history_obj))
            # Keep the entry's own checksum column in sync with the new history row.
            table = self.get_table(resource_type)
            await session.execute(
                update(table).where(
                    col(table.space_name) == space_name,
                    col(table.subpath) == subpath,
                    col(table.shortname) == shortname
                ).values(last_checksum_history=new_checksum)
            )

        return history_diff
    except Exception as e:
        # Deliberate best-effort: history recording must not break the update path.
        print("[!store_entry_diff]", e, old_version_flattend, new_version_flattend)
        logger.error(f"Failed parsing an entry. Error: {e}")
        return {}
async def move(
    self,
    src_space_name: str,
    src_subpath: str,
    src_shortname: str,
    dest_space_name: str,
    dest_subpath: str,
    dest_shortname: str,
    meta: core.Meta,
):
    """Move the file that match the criteria given, remove source folder if empty.

    Renames/relocates the entry to the destination coordinates, regenerating
    its query policies. Moving a whole space additionally cascades the new
    space name into Entries and Attachments. Locks cannot be moved; the
    destination must not already exist.
    """
    if not src_subpath.startswith("/"):
        src_subpath = f"/{src_subpath}"
    if dest_subpath and not dest_subpath.startswith("/"):
        dest_subpath = f"/{dest_subpath}"

    origin = await self.db_load_or_none(src_space_name, src_subpath, src_shortname, meta.__class__)
    if isinstance(origin, Locks):
        raise api.Exception(
            status_code=status.HTTP_400_BAD_REQUEST,
            error=api.Error(
                type="move",
                code=InternalErrorCode.NOT_ALLOWED,
                message="Locks cannot be moved",
            ),
        )
    if origin is None:
        raise api.Exception(
            status_code=status.HTTP_400_BAD_REQUEST,
            error=api.Error(
                type="move",
                code=InternalErrorCode.SHORTNAME_DOES_NOT_EXIST,
                message="Entry does not exist",
            ),
        )

    async with self.get_session() as session:
        # Remember original identity so the in-memory mutation can be undone
        # if the cascade below fails.
        old_shortname = ""
        old_subpath = ""
        try:
            old_shortname = origin.shortname
            if hasattr(origin, 'subpath'):
                old_subpath = origin.subpath
            table = self.get_table(meta.__class__)
            statement = select(table).where(table.space_name == dest_space_name)

            # Global tables are keyed by shortname only; others by subpath too.
            if table in [Roles, Permissions, Users, Spaces]:
                statement = statement.where(table.shortname == dest_shortname)
            else:
                statement = statement.where(table.subpath == dest_subpath).where(
                    table.shortname == dest_shortname
                )

            target = (await session.execute(statement)).one_or_none()
            if target is not None:
                # NOTE(review): this api.Exception is caught by the outer
                # `except Exception` below and re-raised as a generic
                # SOMETHING_WRONG "failed to move entry", hiding the
                # SHORTNAME_ALREADY_EXIST code — confirm whether intended.
                raise api.Exception(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    error=api.Error(
                        type="move",
                        code=InternalErrorCode.SHORTNAME_ALREADY_EXIST,
                        message="already exists",
                    ),
                )
            if dest_shortname:
                origin.shortname = dest_shortname

            if hasattr(origin, 'subpath') and dest_subpath:
                origin.subpath = dest_subpath

            if hasattr(origin, 'space_name') and dest_space_name:
                origin.space_name = dest_space_name

            # Policies depend on location, so regenerate for the destination.
            origin.query_policies = generate_query_policies(
                space_name=dest_space_name,
                subpath=dest_subpath,
                # Fallback strips a trailing letter from the class name
                # (e.g. "Spaces" -> "space") when no resource_type attr exists.
                resource_type=origin.resource_type if hasattr(origin, 'resource_type') else origin.__class__.__name__.lower()[:-1],
                is_active=origin.is_active if hasattr(origin, 'is_active') else True,
                owner_shortname=origin.owner_shortname,
                owner_group_shortname=None,
            )

            session.add(origin)
            try:
                # Renaming a space must cascade into every row that stores
                # the space name denormalized.
                if table is Spaces:
                    await session.execute(
                        update(Spaces)
                        .where(col(Spaces.space_name) == src_space_name)
                        .values(space_name=dest_shortname,shortname=dest_shortname)
                    )
                    await session.execute(
                        update(Entries)
                        .where(col(Entries.space_name) == src_space_name)
                        .values(space_name=dest_shortname)
                    )
                    await session.execute(
                        update(Attachments)
                        .where(col(Attachments.space_name) == src_space_name)
                        .values(space_name=dest_shortname)
                    )
            except Exception as e:
                # Undo the in-memory rename before surfacing the failure.
                origin.shortname = old_shortname
                if hasattr(origin, 'subpath'):
                    origin.subpath = old_subpath

                session.add(origin)

                print("[!move]", e)
                logger.error(f"Failed parsing an entry. Error: {e}")
                raise api.Exception(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    error=api.Error(
                        type="move",
                        code=InternalErrorCode.SOMETHING_WRONG,
                        message="failed to move entry",
                    ),
                )
        except Exception as e:
            print("[!move]", e)
            logger.error(f"Failed parsing an entry. Error: {e}")
            raise api.Exception(
                status_code=status.HTTP_400_BAD_REQUEST,
                error=api.Error(
                    type="move",
                    code=InternalErrorCode.SOMETHING_WRONG,
                    message="failed to move entry",
                ),
            )
def delete_empty(self, path: Path):
    """No-op in this SQL adapter — empty-folder cleanup only applies to
    file-based storage backends; kept to satisfy the adapter interface."""
    pass
async def clone(
    self,
    src_space: str,
    dest_space: str,
    src_subpath: str,
    src_shortname: str,
    dest_subpath: str,
    dest_shortname: str,
    class_type: Type[MetaChild],
):
    """Not implemented for the SQL adapter — currently a silent no-op.

    Kept to satisfy the adapter interface; callers should not rely on any
    cloning side effects from this backend.
    """
    pass
async def is_entry_exist(self,
                         space_name: str,
                         subpath: str,
                         shortname: str,
                         resource_type: ResourceType,
                         schema_shortname: str | None = None, ) -> bool:
    """Return True when an entry matching the given coordinates exists.

    Resolves the resource class from *resource_type*, picks its table, and
    checks for at least one matching row.
    """
    async with self.get_session() as session:
        resource_cls = getattr(
            sys.modules["models.core"], camel_case(resource_type)
        )

        table = self.get_table(resource_cls)
        if not subpath.startswith("/"):
            subpath = f"/{subpath}"

        statement = select(table).where(table.space_name == space_name)

        # Global tables (roles/permissions/users) are keyed by shortname only;
        # every other table is additionally scoped by subpath.
        if table in [Roles, Permissions, Users]:
            statement = statement.where(table.shortname == shortname)
        else:
            # FIX: the original had a long elif over attachment resource
            # classes whose body was byte-identical to this else branch —
            # the dead special case is collapsed (behavior unchanged).
            statement = statement.where(table.subpath == subpath).where(
                table.shortname == shortname
            )

        # FIX: avoid shadowing the result variable inside its own list
        # comprehension and the unidiomatic `False if len(...) == 0 else True`.
        rows = (await session.execute(statement)).fetchall()
        return len(rows) > 0
async def delete(
    self,
    space_name: str,
    subpath: str,
    meta: core.Meta,
    user_shortname: str,
    schema_shortname: str | None = None,
    retrieve_lock_status: bool | None = False,
):
    """Delete the entry described by *meta*, cascading dependent rows.

    Extra behavior by resource type:
      - User: reassign ownership of all owned rows to "anonymous" and drop
        the user's sessions (best-effort; failures are only logged).
      - Space: remove every Entries/Attachments row of the space.
      - Folder: remove entries and attachments nested under the folder path.
      - Regular entry: remove its attachments.

    Raises api.Exception(400, SOMETHING_WRONG) when anything fails.
    `schema_shortname` and `retrieve_lock_status` are interface-compat
    parameters that are not used here.
    """
    # refuse deletion while the entry is referenced elsewhere
    await self._check_in_use(meta)
    async with self.get_session() as session:
        try:
            if not subpath.startswith("/"):
                subpath = f"/{subpath}"

            # load the concrete table row to delete
            result = await self.db_load_or_none(space_name, subpath, meta.shortname, meta.__class__)

            if meta.__class__ == core.User:
                try:
                    # hand everything the user owned to the "anonymous" account
                    await session.execute(update(Spaces).where(col(Spaces.owner_shortname) == meta.shortname).values(owner_shortname="anonymous"))
                    await session.execute(update(Entries).where(col(Entries.owner_shortname) == meta.shortname).values(owner_shortname="anonymous"))
                    await session.execute(update(Attachments).where(col(Attachments.owner_shortname) == meta.shortname).values(owner_shortname="anonymous"))
                    await session.execute(update(Roles).where(col(Roles.owner_shortname) == meta.shortname).values(owner_shortname="anonymous"))
                    await session.execute(update(Permissions).where(col(Permissions.owner_shortname) == meta.shortname).values(owner_shortname="anonymous"))

                    await session.execute(update(Locks).where(col(Locks.owner_shortname) == meta.shortname).values(owner_shortname="anonymous"))
                    await session.execute(update(Histories).where(col(Histories.owner_shortname) == meta.shortname).values(owner_shortname="anonymous"))

                    # a deleted user must not keep live sessions
                    await session.execute(delete(Sessions).where(col(Sessions.shortname) == meta.shortname))
                except Exception as _e:
                    logger.warning(f"Failed to reassign ownership to anonymous for user {meta.shortname}: {_e}")

            await session.delete(result)
            if meta.__class__ == core.Space:
                # wipe all content belonging to the space
                statement2 = delete(Attachments).where(col(Attachments.space_name) == space_name)
                await session.execute(statement2)
                statement = delete(Entries).where(col(Entries.space_name) == space_name)
                await session.execute(statement)
            if meta.__class__ == core.Folder:
                # cascade: everything whose subpath is nested under the folder
                _subpath = f"{subpath}/{meta.shortname}".replace('//', '/')
                statement2 = delete(Attachments) \
                    .where(col(Attachments.space_name) == space_name) \
                    .where(col(Attachments.subpath).startswith(_subpath))
                await session.execute(statement2)
                statement = delete(Entries) \
                    .where(col(Entries.space_name) == space_name) \
                    .where(col(Entries.subpath).startswith(_subpath))
                await session.execute(statement)
            elif isinstance(result, Entries):
                # attachments of an entry live under "<subpath>/<shortname>"
                entry_attachment_subpath = f"{subpath}/{meta.shortname}".replace('//', '/')
                statement = delete(Attachments) \
                    .where(col(Attachments.space_name) == space_name) \
                    .where(col(Attachments.subpath).startswith(entry_attachment_subpath))
                await session.execute(statement)

            # Refresh authz MVs only when Users/Roles/Permissions changed
            # try:
            #     if meta.__class__ in (core.User, core.Role, core.Permission):
            #         await self.ensure_authz_materialized_views_fresh()
            # except Exception as _e:
            #     logger.warning(f"AuthZ MV refresh after delete skipped: {_e}")
        except Exception as e:
            print("[!delete]", e)
            logger.error(f"Failed parsing an entry. Error: {e}")
            raise api.Exception(
                status_code=status.HTTP_400_BAD_REQUEST,
                error=api.Error(
                    type="delete",
                    code=InternalErrorCode.SOMETHING_WRONG,
                    message="failed to delete entry",
                ),
            )
|
|
2421
|
+
|
|
2422
|
+
async def lock_handler(self, space_name: str, subpath: str, shortname: str, user_shortname: str,
|
|
2423
|
+
action: LockAction) -> dict | None:
|
|
2424
|
+
if not subpath.startswith("/"):
|
|
2425
|
+
subpath = f"/{subpath}"
|
|
2426
|
+
|
|
2427
|
+
async with self.get_session() as session:
|
|
2428
|
+
match action:
|
|
2429
|
+
case LockAction.lock:
|
|
2430
|
+
statement = select(Locks).where(Locks.space_name == space_name) \
|
|
2431
|
+
.where(Locks.subpath == subpath) \
|
|
2432
|
+
.where(Locks.shortname == shortname)
|
|
2433
|
+
result = (await session.execute(statement)).one_or_none()
|
|
2434
|
+
if result:
|
|
2435
|
+
raise api.Exception(
|
|
2436
|
+
status_code=status.HTTP_400_BAD_REQUEST,
|
|
2437
|
+
error=api.Error(
|
|
2438
|
+
type="lock",
|
|
2439
|
+
code=InternalErrorCode.LOCKED_ENTRY,
|
|
2440
|
+
message="entry already locked already exists!",
|
|
2441
|
+
)
|
|
2442
|
+
)
|
|
2443
|
+
|
|
2444
|
+
lock = Locks(
|
|
2445
|
+
uuid=uuid4(),
|
|
2446
|
+
space_name=space_name,
|
|
2447
|
+
subpath=subpath,
|
|
2448
|
+
shortname=shortname,
|
|
2449
|
+
owner_shortname=user_shortname,
|
|
2450
|
+
)
|
|
2451
|
+
session.add(lock)
|
|
2452
|
+
await session.commit()
|
|
2453
|
+
await session.refresh(lock)
|
|
2454
|
+
return lock.model_dump()
|
|
2455
|
+
case LockAction.fetch:
|
|
2456
|
+
lock_payload = (await self.load(
|
|
2457
|
+
space_name=space_name,
|
|
2458
|
+
subpath=subpath,
|
|
2459
|
+
shortname=shortname,
|
|
2460
|
+
class_type=core.Lock,
|
|
2461
|
+
user_shortname=user_shortname,
|
|
2462
|
+
)).model_dump()
|
|
2463
|
+
return lock_payload
|
|
2464
|
+
case LockAction.unlock:
|
|
2465
|
+
statement2 = delete(Locks) \
|
|
2466
|
+
.where(col(Locks.space_name) == space_name) \
|
|
2467
|
+
.where(col(Locks.subpath) == subpath) \
|
|
2468
|
+
.where(col(Locks.shortname) == shortname)
|
|
2469
|
+
await session.execute(statement2)
|
|
2470
|
+
await session.commit()
|
|
2471
|
+
return None
|
|
2472
|
+
|
|
2473
|
+
async def fetch_space(self, space_name: str) -> core.Space | None:
    """Load the Space meta entry named *space_name*, or None when the
    load fails for any reason (the error is printed, not raised)."""
    try:
        space = await self.load(space_name, "/", space_name, core.Space)
    except Exception as e:
        print("[!fetch_space]", e, space_name)
        return None
    return space
|
|
2479
|
+
|
|
2480
|
+
async def set_user_session(self, user_shortname: str, token: str) -> bool:
    """Record a new login session for *user_shortname*.

    Enforces settings.max_sessions_per_user by evicting old sessions first
    (0 means unlimited).  The token is stored hashed, never in the clear.
    Returns True on success, False on any error (logged to stdout).
    """
    try:
        total, last_session = await self.get_user_session(user_shortname, token)

        # cap reached (or single-session mode with an existing session):
        # drop surplus sessions before adding the new one
        if (settings.max_sessions_per_user == 1 and last_session is not None) \
                or (settings.max_sessions_per_user != 0 and total >= settings.max_sessions_per_user):
            await self.remove_user_session(user_shortname)

        timestamp = datetime.now()
        async with self.get_session() as session:
            session.add(
                Sessions(
                    uuid=uuid4(),
                    shortname=user_shortname,
                    token=hash_password(token),  # stored hashed
                    timestamp=timestamp,
                )
            )

        return True
    except Exception as e:
        print("[!set_sql_user_session]", e)
        return False
|
|
2503
|
+
|
|
2504
|
+
async def get_user_session(self, user_shortname: str, token: str) -> Tuple[int, str | None]:
    """Validate *token* against the user's stored sessions.

    Returns (session_count, token) when a live session matches the token,
    or (session_count, None) otherwise.  Expired sessions (older than
    settings.session_inactivity_ttl) are deleted as a side effect, and a
    matching session gets its timestamp refreshed (sliding expiry).
    """
    async with self.get_session() as session:
        statement = select(Sessions) \
            .where(col(Sessions.shortname) == user_shortname)

        results = (await session.execute(statement)).all()
        results = [result[0] for result in results]

        if len(results) == 0:
            return 0, None

        for r in results:
            # expired: purge the row and keep scanning the rest
            if settings.session_inactivity_ttl + r.timestamp.timestamp() < time.time():
                await session.execute(delete(Sessions).where(col(Sessions.uuid) == r.uuid))
                continue
            # token is stored hashed; compare via verify_password
            if verify_password(token, r.token):
                # sliding expiry: touch the matched session
                r.timestamp = datetime.now()
                session.add(r)
                await session.commit()
                return len(results), token
            # else:
            #     await session.execute(delete(Sessions).where(col(Sessions.uuid) == r.uuid))
        return len(results), None
|
|
2527
|
+
|
|
2528
|
+
async def remove_user_session(self, user_shortname: str) -> bool:
    """Delete the user's oldest sessions beyond the allowed maximum.

    Keeps the (max_sessions_per_user - 1) most recent sessions so one slot
    is freed for the session about to be created.  Returns True on success.
    """
    async with self.get_session() as session:
        try:
            # sessions ordered newest-first; skip the ones we keep
            statement = select(Sessions).where(col(Sessions.shortname) == user_shortname).order_by(
                col(Sessions.timestamp).desc()
            ).offset(settings.max_sessions_per_user - 1)
            oldest_sessions = (await session.execute(statement)).all()
            oldest_sessions = [oldest_session[0] for oldest_session in oldest_sessions]
            for oldest_session in oldest_sessions:
                await session.delete(oldest_session)
            await session.commit()
            return True
        except Exception as e:
            print("[!remove_sql_user_session]", e)
            return False
|
|
2543
|
+
|
|
2544
|
+
async def set_invitation(self, invitation_token: str, invitation_value):
    """Persist an invitation token/value pair.

    Best-effort: any failure is printed and swallowed (matches the other
    token-storage helpers in this adapter).
    """
    async with self.get_session() as session:
        try:
            record = Invitations(
                uuid=uuid4(),
                invitation_token=invitation_token,
                invitation_value=invitation_value,
                timestamp=datetime.now(),
            )
            session.add(record)
        except Exception as e:
            print("[!set_invitation]", e)
|
|
2558
|
+
|
|
2559
|
+
async def get_invitation(self, invitation_token: str) -> str | None:
    """Look up an invitation by its token and return the stored value,
    or None when no such invitation exists."""
    async with self.get_session() as session:
        query = select(Invitations).where(col(Invitations.invitation_token) == invitation_token)

        row = (await session.execute(query)).one_or_none()
        if row is None:
            return None
        invitation = Invitations.model_validate(row[0])
        return invitation.invitation_value
|
|
2570
|
+
|
|
2571
|
+
async def delete_invitation(self, invitation_token: str) -> bool:
    """Delete the invitation row matching *invitation_token*.

    Returns True on success, False on any error (logged to stdout).
    """
    async with self.get_session() as session:
        try:
            statement = delete(Invitations).where(col(Invitations.invitation_token) == invitation_token)
            await session.execute(statement)
            return True
        except Exception as e:
            # fixed copy-pasted log tag (was "[!remove_sql_user_session]")
            print("[!delete_invitation]", e)
            return False
|
|
2580
|
+
|
|
2581
|
+
async def set_url_shortner(self, token_uuid: str, url: str):
    """Persist a shortened-URL mapping keyed by *token_uuid*.

    Best-effort: any failure is printed and swallowed.
    """
    async with self.get_session() as session:
        try:
            record = URLShorts(
                uuid=uuid4(),
                token_uuid=token_uuid,
                url=url,
                timestamp=datetime.now(),
            )
            session.add(record)
        except Exception as e:
            print("[!set_url_shortner]", e)
|
|
2594
|
+
|
|
2595
|
+
async def get_url_shortner(self, token_uuid: str) -> str | None:
    """Resolve *token_uuid* to its stored URL.

    Expired mappings (older than settings.url_shorter_expires) are purged
    on access and treated as missing.
    """
    async with self.get_session() as session:
        row = (await session.execute(
            select(URLShorts).where(URLShorts.token_uuid == token_uuid)
        )).one_or_none()
        if row is None:
            return None
        short = URLShorts.model_validate(row[0])
        # lazily expire stale mappings
        if settings.url_shorter_expires + short.timestamp.timestamp() < time.time():
            await self.delete_url_shortner(token_uuid)
            return None

        return short.url
|
|
2609
|
+
|
|
2610
|
+
async def delete_url_shortner(self, token_uuid: str) -> bool:
    """Delete the shortened-URL row for *token_uuid*.

    Returns True on success, False on any error (logged to stdout).
    """
    async with self.get_session() as session:
        try:
            statement = delete(URLShorts).where(col(URLShorts.token_uuid) == token_uuid)
            await session.execute(statement)
            return True
        except Exception as e:
            # fixed copy-pasted log tag (was "[!remove_sql_user_session]")
            print("[!delete_url_shortner]", e)
            return False
|
|
2619
|
+
|
|
2620
|
+
async def delete_url_shortner_by_token(self, invitation_token: str) -> bool:
    """Remove every shortened URL whose target embeds *invitation_token*.

    Returns True on success, False on any error (logged to stdout).
    """
    async with self.get_session() as session:
        try:
            await session.execute(
                delete(URLShorts).where(col(URLShorts.url).ilike(f"%{invitation_token}%"))
            )
        except Exception as e:
            print("[!delete_url_shortner_by_token]", e)
            return False
        return True
|
|
2629
|
+
|
|
2630
|
+
async def _set_query_final_results(self, query, results):
    """Convert raw table rows into API Records according to the query type.

    - attachments query: plain per-item conversion.
    - aggregation query: delegate each item to set_results_from_aggregation.
    - otherwise: convert each row, optionally strip payload bodies
      (when retrieve_json_payload is off) and fetch attachments
      concurrently (when retrieve_attachments is on).

    NOTE: mutates and returns the *results* list in place.
    """
    is_aggregation = query.type == QueryType.aggregation
    is_attachment_query = query.type == QueryType.attachments
    # history/events records carry no payload to post-process
    process_payload = query.type not in [QueryType.history, QueryType.events]

    # Case 1: Attachment query → Direct conversion of all items
    if is_attachment_query:
        return [
            item.to_record(item.subpath, item.shortname)
            for item in results
        ]

    # Case 2: Aggregation query → delegate to existing aggregator
    if is_aggregation:
        for idx, item in enumerate(results):
            results = set_results_from_aggregation(query, item, results, idx)
        return results

    # Case 3: Standard query → convert and optionally fetch attachments
    attachment_tasks = []
    attachment_indices = []

    for idx, item in enumerate(results):
        rec = item.to_record(item.subpath, item.shortname)
        results[idx] = rec

        if process_payload:
            # Strip payload body early (if disabled)
            if not query.retrieve_json_payload:
                payload = rec.attributes.get("payload", {})
                if payload and payload.get("body"):
                    payload["body"] = None

            # Queue attachments if requested
            if query.retrieve_attachments:
                attachment_tasks.append(
                    self.get_entry_attachments(
                        rec.subpath,
                        Path(f"{query.space_name}/{rec.shortname}"),
                        retrieve_json_payload=True,
                    )
                )
                attachment_indices.append(idx)

    # Run all attachment retrievals concurrently
    if attachment_tasks:
        attachments_list = await asyncio.gather(*attachment_tasks)
        for idx, attachments in zip(attachment_indices, attachments_list):
            results[idx].attachments = attachments

    return results
|
|
2681
|
+
|
|
2682
|
+
async def clear_failed_password_attempts(self, user_shortname: str) -> bool:
    """Reset the user's failed-login counter to zero.

    Returns False when the user does not exist or on any error.
    """
    async with self.get_session() as session:
        try:
            row = (await session.execute(
                select(Users).where(Users.shortname == user_shortname)
            )).one_or_none()
            if row is None:
                return False
            user_row = row[0]
            user_row.attempt_count = 0
            session.add(user_row)
        except Exception as e:
            print("[!clear_failed_password_attempts]", e)
            return False
        return True
|
|
2696
|
+
|
|
2697
|
+
async def get_failed_password_attempt_count(self, user_shortname: str) -> int:
    """Return the user's failed-login counter; 0 when the user is missing
    or the counter has never been set."""
    async with self.get_session() as session:
        row = (await session.execute(
            select(Users).where(col(Users.shortname) == user_shortname)
        )).one_or_none()
        if row is None:
            return 0
        user = Users.model_validate(row[0])
        return 0 if user.attempt_count is None else user.attempt_count
|
|
2707
|
+
|
|
2708
|
+
async def set_failed_password_attempt_count(self, user_shortname: str, attempt_count: int) -> bool:
    """Store *attempt_count* on the user's row.

    Returns False when the user does not exist or on any error.
    """
    async with self.get_session() as session:
        try:
            row = (await session.execute(
                select(Users).where(col(Users.shortname) == user_shortname)
            )).one_or_none()
            if row is None:
                return False
            user_row = row[0]
            user_row.attempt_count = attempt_count
            session.add(user_row)
        except Exception as e:
            print("[!set_failed_password_attempt_count]", e)
            return False
        return True
|
|
2722
|
+
|
|
2723
|
+
async def get_spaces(self) -> dict:
    """Return all spaces as {shortname: space_dump}.

    Fix/cleanup: the original used enumerate() with an unused index and a
    manual dict-building loop; replaced with a dict comprehension.
    """
    async with self.get_session() as session:
        rows = (await session.execute(select(Spaces))).all()
        return {
            space.shortname: space.model_dump()
            for space in (Spaces.model_validate(row[0]) for row in rows)
        }
|
|
2733
|
+
|
|
2734
|
+
async def get_media_attachment(self, space_name: str, subpath: str, shortname: str) -> io.BytesIO | None:
    """Fetch the raw media bytes of an attachment as a BytesIO stream,
    or None when no matching attachment exists."""
    if not subpath.startswith("/"):
        subpath = f"/{subpath}"

    async with self.get_session() as session:
        query = (
            select(Attachments.media)
            .where(Attachments.space_name == space_name)
            .where(Attachments.subpath == subpath)
            .where(Attachments.shortname == shortname)
        )

        row = (await session.execute(query)).one_or_none()
        if not row:
            return None
        return io.BytesIO(row[0])
|
|
2749
|
+
|
|
2750
|
+
async def validate_uniqueness(
    self, space_name: str, record: core.Record, action: str = api.RequestType.create, user_shortname=None
) -> bool:
    """
    Get list of unique fields from entry's folder meta data
    ensure that each sub-list in the list is unique across all entries

    Each element of "unique_fields" is a compound key: the record violates
    uniqueness only when ALL of its fields match an existing entry.
    Returns True when no configuration exists or no conflict is found;
    raises API_Exception(400, DATA_SHOULD_BE_UNIQUE) on conflict.
    """
    # uniqueness rules live on the parent folder's payload body
    parent_subpath, folder_shortname = os.path.split(record.subpath)
    folder_meta = None
    try:
        folder_meta = await self.load(space_name, parent_subpath, folder_shortname, core.Folder)
    except Exception:
        folder_meta = None

    # no folder / no payload / no unique_fields list → nothing to enforce
    if folder_meta is None or folder_meta.payload is None or not isinstance(folder_meta.payload.body,
                                                                            dict) or not isinstance(
            folder_meta.payload.body.get("unique_fields", None), list):  # type: ignore
        return True

    current_user = None
    if action is api.RequestType.update and record.resource_type is ResourceType.user:
        # on user update, the entry's own current values must not count as conflicts
        current_user = await self.load(space_name, record.subpath, record.shortname, core.User)

    for compound in folder_meta.payload.body["unique_fields"]:  # type: ignore
        query_string = ""
        for composite_unique_key in compound:
            value = get_nested_value(record.attributes, composite_unique_key)
            if value is None or value == "":
                continue
            # skip fields whose value is unchanged on the user being updated
            if current_user is not None and hasattr(current_user, composite_unique_key) \
                    and getattr(current_user, composite_unique_key) == value:
                continue

            query_string += f"@{composite_unique_key}:{value} "

        if query_string == "":
            continue

        q = api.Query(
            space_name=space_name,
            subpath=record.subpath,
            type=QueryType.subpath,
            search=query_string
        )
        owner = record.attributes.get("owner_shortname", None) if user_shortname is None else user_shortname
        total, _ = await self.query(q, owner)

        if total != 0:
            raise API_Exception(
                status.HTTP_400_BAD_REQUEST,
                API_Error(
                    type="request",
                    code=InternalErrorCode.DATA_SHOULD_BE_UNIQUE,
                    message=f"Entry properties should be unique: {query_string}",
                ),
            )
    return True
|
|
2807
|
+
|
|
2808
|
+
async def validate_payload_with_schema(
    self,
    payload_data: UploadFile | dict,
    space_name: str,
    schema_shortname: str,
):
    """Validate *payload_data* against a stored JSON schema (Draft 7).

    Raises API_Exception(400, INVALID_DATA) when payload_data is neither a
    dict nor an UploadFile; Draft7Validator raises on schema violations.
    """
    if not isinstance(payload_data, (dict, UploadFile)):
        raise API_Exception(
            status.HTTP_400_BAD_REQUEST,
            API_Error(
                type="request",
                code=InternalErrorCode.INVALID_DATA,
                message="Invalid payload.body",
            ),
        )

    # built-in schemas always live in the management space
    if schema_shortname in ["folder_rendering", "meta_schema"]:
        space_name = "management"
    schema = await self.load(space_name, "/schema", schema_shortname, core.Schema)
    if schema.payload:
        # unwrap: the actual JSON-schema document is the payload body
        schema = schema.payload.model_dump()['body']

    if not isinstance(payload_data, dict):
        # UploadFile: parse the JSON, then rewind so callers can re-read it
        data = json.load(payload_data.file)
        payload_data.file.seek(0)
    else:
        data = payload_data

    Draft7Validator(schema).validate(data)  # type: ignore
|
|
2837
|
+
|
|
2838
|
+
async def get_schema(self, space_name: str, schema_shortname: str, owner_shortname: str) -> dict:
    """Load a schema entry and return its body with $ref references
    resolved; {} when the schema or its body is unavailable."""
    schema_meta = await self.load(
        space_name=space_name,
        subpath="/schema",
        shortname=schema_shortname,
        class_type=core.Schema,
        user_shortname=owner_shortname,
    )

    if not (schema_meta and schema_meta.payload and isinstance(schema_meta.payload.body, dict)):
        return {}

    return resolve_schema_references(schema_meta.payload.body)
|
|
2851
|
+
|
|
2852
|
+
async def check_uniqueness(self, unique_fields, search_str, redis_escape_chars) -> dict:
    """Check each given field/value pair against the users table.

    Returns {"unique": False, "field": <field>} at the first clash, else
    {"unique": True}.  `search_str` and `redis_escape_chars` are kept for
    interface compatibility with the redis-backed adapter and unused here.
    """
    for field, candidate in unique_fields.items():
        if candidate is None:
            continue
        # "email_unescaped" is an alias for the stored "email" column
        lookup_key = "email" if field == "email_unescaped" else field

        clash = await self.get_entry_by_criteria({lookup_key: candidate}, Users)

        if clash is not None:
            return {"unique": False, "field": lookup_key}

    return {"unique": True}
|
|
2865
|
+
|
|
2866
|
+
# async def ensure_authz_materialized_views_fresh(self) -> None:
|
|
2867
|
+
# try:
|
|
2868
|
+
# async with self.get_session() as session:
|
|
2869
|
+
# latest_q = text(
|
|
2870
|
+
# """
|
|
2871
|
+
# SELECT GREATEST(
|
|
2872
|
+
# COALESCE((SELECT MAX(updated_at) FROM users), to_timestamp(0)),
|
|
2873
|
+
# COALESCE((SELECT MAX(updated_at) FROM roles), to_timestamp(0)),
|
|
2874
|
+
# COALESCE((SELECT MAX(updated_at) FROM permissions), to_timestamp(0))
|
|
2875
|
+
# ) AS max_ts
|
|
2876
|
+
# """
|
|
2877
|
+
# )
|
|
2878
|
+
# latest_ts_row = (await session.execute(latest_q)).one()
|
|
2879
|
+
# max_ts = latest_ts_row[0]
|
|
2880
|
+
#
|
|
2881
|
+
# meta_row = (
|
|
2882
|
+
# await session.execute(text("SELECT last_source_ts FROM authz_mv_meta WHERE id = 1"))).one_or_none()
|
|
2883
|
+
# if meta_row is None or (meta_row[0] is None) or (max_ts is not None and max_ts > meta_row[0]):
|
|
2884
|
+
# await session.execute(text("REFRESH MATERIALIZED VIEW mv_user_roles"))
|
|
2885
|
+
# await session.execute(text("REFRESH MATERIALIZED VIEW mv_role_permissions"))
|
|
2886
|
+
# await session.execute(text("""
|
|
2887
|
+
# INSERT INTO authz_mv_meta(id, last_source_ts, refreshed_at)
|
|
2888
|
+
# VALUES (1, :ts, now())
|
|
2889
|
+
# ON CONFLICT (id)
|
|
2890
|
+
# DO UPDATE SET last_source_ts = EXCLUDED.last_source_ts,
|
|
2891
|
+
# refreshed_at = now()
|
|
2892
|
+
# """), {"ts": max_ts})
|
|
2893
|
+
# except Exception as e:
|
|
2894
|
+
# logger.warning(f"AuthZ MV refresh failed or skipped: {e}")
|
|
2895
|
+
#
|
|
2896
|
+
# async def _bulk_load_by_shortnames(self, class_type: Type[MetaChild], shortnames: list[str]) -> dict[
|
|
2897
|
+
# str, MetaChild]:
|
|
2898
|
+
# if not shortnames:
|
|
2899
|
+
# return {}
|
|
2900
|
+
# table = self.get_table(class_type)
|
|
2901
|
+
# items: dict[str, MetaChild] = {}
|
|
2902
|
+
# async with self.get_session() as session:
|
|
2903
|
+
# res = await session.execute(
|
|
2904
|
+
# select(table).where(col(table.shortname).in_(shortnames))
|
|
2905
|
+
# )
|
|
2906
|
+
# rows = [r[0] for r in res.all()]
|
|
2907
|
+
# for row in rows:
|
|
2908
|
+
# model_obj = class_type.model_validate(row.model_dump())
|
|
2909
|
+
# items[getattr(row, 'shortname')] = model_obj
|
|
2910
|
+
# return items
|
|
2911
|
+
|
|
2912
|
+
async def get_role_permissions(self, role: core.Role) -> list[core.Permission]:
    """Resolve the Permission entries attached to *role*.

    Missing roles yield []; permission shortnames that cannot be loaded
    are silently skipped.
    """
    stored_role = await self.load_or_none(
        settings.management_space, 'roles', role.shortname, core.Role
    )
    if stored_role is None:
        return []

    resolved: list[core.Permission] = []
    for perm_shortname in stored_role.permissions:
        perm = await self.load_or_none(
            settings.management_space, 'permissions', perm_shortname, core.Permission
        )
        if perm is not None:
            resolved.append(perm)
    return resolved
|
|
2927
|
+
|
|
2928
|
+
async def get_user_roles(self, user_shortname: str) -> dict[str, core.Role]:
    """Map role shortname -> core.Role for the given user.

    Every authenticated (non-anonymous) user implicitly receives the
    "logged_in" role in addition to the roles listed on their record.
    Returns {} when the user does not exist or on any error.
    """
    try:
        user = await self.load_or_none(
            settings.management_space, settings.users_subpath, user_shortname, core.User
        )
        if user is None:
            return {}
        euser_roles: dict[str, core.Role] = {}
        if user_shortname != "anonymous":
            # implicit role for all signed-in users
            role_record = await self.load_or_none(
                settings.management_space, 'roles', 'logged_in', core.Role
            )
            if role_record is not None:
                euser_roles['logged_in'] = role_record
        for role in user.roles:
            role_record = await self.load_or_none(
                settings.management_space, 'roles', role, core.Role
            )
            if role_record is None:
                continue  # dangling role reference: skip silently
            euser_roles[role] = role_record
        return euser_roles
    except Exception as e2:
        print(f"Error: {e2}")
        return {}
|
|
2953
|
+
|
|
2954
|
+
async def load_user_meta(self, user_shortname: str) -> Any:
    """Fetch the core.User meta entry for *user_shortname*, loading it
    as that user (acting as themselves)."""
    return await self.load(
        space_name=settings.management_space,
        subpath="users",
        shortname=user_shortname,
        class_type=core.User,
        user_shortname=user_shortname,
    )
|
|
2964
|
+
|
|
2965
|
+
async def generate_user_permissions(self, user_shortname: str) -> dict:
    """Build the flattened permission map for a user.

    Keys are "space:subpath:resource_type"; values carry allowed_actions,
    conditions, restricted_fields, allowed_fields_values and
    filter_fields_values.  When several roles grant the same key, their
    actions and conditions are unioned; the field-level settings come from
    the last permission processed — TODO confirm that override is intended.
    """
    user_permissions: dict = {}

    user_roles = await self.get_user_roles(user_shortname)

    for _, role in user_roles.items():
        role_permissions = await self.get_role_permissions(role)
        # anonymous users additionally receive the special "world" permission
        if user_shortname == "anonymous":
            permission_world_record = await self.load_or_none(settings.management_space, 'permissions', "world",
                                                              core.Permission)
            if permission_world_record:
                role_permissions.append(permission_world_record)

        for permission in role_permissions:
            for space_name, permission_subpaths in permission.subpaths.items():
                for permission_subpath in permission_subpaths:
                    # expand magic placeholders (e.g. current-user tokens)
                    permission_subpath = trans_magic_words(permission_subpath, user_shortname)
                    for permission_resource_types in permission.resource_types:
                        actions = set(permission.actions)
                        conditions = set(permission.conditions)
                        if (
                            f"{space_name}:{permission_subpath}:{permission_resource_types}"
                            in user_permissions
                        ):
                            old_perm = user_permissions[
                                f"{space_name}:{permission_subpath}:{permission_resource_types}"
                            ]

                            # merge with what earlier roles already granted
                            if isinstance(actions, list):
                                actions = set(actions)
                            actions |= set(old_perm["allowed_actions"])

                            if isinstance(conditions, list):
                                conditions = set(conditions)
                            conditions |= set(old_perm["conditions"])

                        user_permissions[
                            f"{space_name}:{permission_subpath}:{permission_resource_types}"
                        ] = {
                            "allowed_actions": list(actions),
                            "conditions": list(conditions),
                            "restricted_fields": permission.restricted_fields,
                            "allowed_fields_values": permission.allowed_fields_values,
                            "filter_fields_values": permission.filter_fields_values
                        }
    return user_permissions
|
|
3011
|
+
|
|
3012
|
+
async def get_user_permissions(self, user_shortname: str) -> dict:
    """Return the user's permission map; recomputed on every call (this
    adapter does no caching here)."""
    return await self.generate_user_permissions(user_shortname)
|
|
3014
|
+
|
|
3015
|
+
async def get_user_by_criteria(self, key: str, value: str) -> str | None:
    """Return the shortname of the user matching {key: value}, or None
    when no such user exists."""
    match_row = await self.get_entry_by_criteria({key: value}, Users)
    return None if match_row is None else str(match_row.shortname)
|
|
3023
|
+
|
|
3024
|
+
async def get_payload_from_event(self, event) -> dict:
    """Load the meta entry referenced by *event* and return its payload body.

    Assumes the referenced entry has a non-None payload with a dict body —
    TODO confirm against callers.
    """
    notification_request_meta = await self.load(
        event.space_name,
        event.subpath,
        event.shortname,
        # resolve the concrete model class from the event's resource type
        getattr(sys_modules["models.core"], camel_case(event.resource_type)),
        event.user_shortname,
    )
    return notification_request_meta.payload.body  # type: ignore
|
|
3033
|
+
|
|
3034
|
+
async def get_user_roles_from_groups(self, user_meta: core.User) -> list:
    """Group-derived roles are not supported by this adapter; always []."""
    return []
|
|
3036
|
+
|
|
3037
|
+
    async def drop_index(self, space_name):
        """No-op: this adapter keeps no per-space search index to drop."""
        pass
|
|
3039
|
+
|
|
3040
|
+
    async def initialize_spaces(self) -> None:
        """Sanity-check that the spaces table is reachable at startup.

        Runs a trivial ``SELECT ... LIMIT 1`` against ``Spaces``; if that
        fails (e.g. the schema is missing or the DB is unreachable) the
        error is printed and the running event loop is stopped, halting the
        service rather than continuing without a usable database.
        """
        async with self.get_session() as session:
            try:
                # Probe query: result is discarded, only success/failure matters.
                (await session.execute(select(Spaces).limit(1))).one_or_none()
            except Exception as e:
                print(f"Error: {e}")
                try:
                    # Deliberate hard stop: bring the whole loop down on DB failure.
                    loop = asyncio.get_event_loop()
                    loop.stop()
                except RuntimeError as e:
                    # No running loop to stop (e.g. called outside one) — just report.
                    print(f"Error: {e}")
|
|
3051
|
+
|
|
3052
|
+
    async def create_user_premission_index(self) -> None:
        """No-op for this adapter (no dedicated permission index to build).

        NOTE(review): name keeps the existing "premission" spelling because it
        is the public interface callers use.
        """
        pass
|
|
3054
|
+
|
|
3055
|
+
    async def store_modules_to_redis(self, roles, groups, permissions) -> None:
        """No-op: this adapter does not mirror roles/groups/permissions to Redis."""
        pass
|
|
3057
|
+
|
|
3058
|
+
    async def delete_user_permissions_map_in_redis(self) -> None:
        """No-op: no Redis-cached user-permissions map exists in this adapter."""
        pass
|
|
3060
|
+
|
|
3061
|
+
async def internal_save_model(
|
|
3062
|
+
self,
|
|
3063
|
+
space_name: str,
|
|
3064
|
+
subpath: str,
|
|
3065
|
+
meta: core.Meta,
|
|
3066
|
+
payload: dict | None = None
|
|
3067
|
+
):
|
|
3068
|
+
await self.save(
|
|
3069
|
+
space_name=space_name,
|
|
3070
|
+
subpath=subpath,
|
|
3071
|
+
meta=meta,
|
|
3072
|
+
)
|
|
3073
|
+
|
|
3074
|
+
async def internal_sys_update_model(
|
|
3075
|
+
self,
|
|
3076
|
+
space_name: str,
|
|
3077
|
+
subpath: str,
|
|
3078
|
+
meta: core.Meta,
|
|
3079
|
+
updates: dict,
|
|
3080
|
+
sync_redis: bool = True,
|
|
3081
|
+
payload_dict: dict[str, Any] = {},
|
|
3082
|
+
):
|
|
3083
|
+
meta.updated_at = datetime.now()
|
|
3084
|
+
meta_updated = False
|
|
3085
|
+
payload_updated = False
|
|
3086
|
+
|
|
3087
|
+
if not payload_dict:
|
|
3088
|
+
try:
|
|
3089
|
+
if meta.payload and isinstance(meta.payload.body, dict):
|
|
3090
|
+
# Payload body is already loaded
|
|
3091
|
+
payload_dict = meta.payload.body
|
|
3092
|
+
|
|
3093
|
+
elif meta.payload and isinstance(meta.payload.body, str):
|
|
3094
|
+
# Payload body is the filename string
|
|
3095
|
+
mydict = await self.load_resource_payload(
|
|
3096
|
+
space_name, subpath, meta.payload.body, type(meta)
|
|
3097
|
+
)
|
|
3098
|
+
payload_dict = mydict if mydict else {}
|
|
3099
|
+
except Exception:
|
|
3100
|
+
pass
|
|
3101
|
+
|
|
3102
|
+
restricted_fields = [
|
|
3103
|
+
"uuid",
|
|
3104
|
+
"shortname",
|
|
3105
|
+
"created_at",
|
|
3106
|
+
"updated_at",
|
|
3107
|
+
"owner_shortname",
|
|
3108
|
+
"payload",
|
|
3109
|
+
]
|
|
3110
|
+
old_version_flattend = {**meta.model_dump()}
|
|
3111
|
+
for key, value in updates.items():
|
|
3112
|
+
if key in restricted_fields:
|
|
3113
|
+
continue
|
|
3114
|
+
|
|
3115
|
+
if key in meta.model_fields.keys():
|
|
3116
|
+
meta_updated = True
|
|
3117
|
+
meta.__setattr__(key, value)
|
|
3118
|
+
elif payload_dict:
|
|
3119
|
+
payload_dict[key] = value
|
|
3120
|
+
payload_updated = True
|
|
3121
|
+
|
|
3122
|
+
if meta_updated:
|
|
3123
|
+
await self.update(
|
|
3124
|
+
space_name,
|
|
3125
|
+
subpath,
|
|
3126
|
+
meta,
|
|
3127
|
+
old_version_flattend,
|
|
3128
|
+
{**meta.model_dump()},
|
|
3129
|
+
list(updates.keys()),
|
|
3130
|
+
meta.shortname
|
|
3131
|
+
)
|
|
3132
|
+
if payload_updated and meta.payload and meta.payload.schema_shortname:
|
|
3133
|
+
await self.validate_payload_with_schema(
|
|
3134
|
+
payload_dict, space_name, meta.payload.schema_shortname
|
|
3135
|
+
)
|
|
3136
|
+
await self.save_payload_from_json(
|
|
3137
|
+
space_name, subpath, meta, payload_dict
|
|
3138
|
+
)
|
|
3139
|
+
|
|
3140
|
+
async def get_entry_by_var(
|
|
3141
|
+
self,
|
|
3142
|
+
key: str,
|
|
3143
|
+
val: str,
|
|
3144
|
+
logged_in_user,
|
|
3145
|
+
retrieve_json_payload: bool = False,
|
|
3146
|
+
retrieve_attachments: bool = False,
|
|
3147
|
+
retrieve_lock_status: bool = False,
|
|
3148
|
+
) -> core.Record:
|
|
3149
|
+
_result = await self.get_entry_by_criteria({key: val})
|
|
3150
|
+
|
|
3151
|
+
if _result is None:
|
|
3152
|
+
raise api.Exception(
|
|
3153
|
+
status.HTTP_400_BAD_REQUEST,
|
|
3154
|
+
error=api.Error(
|
|
3155
|
+
type="media", code=InternalErrorCode.OBJECT_NOT_FOUND, message="Request object is not available"
|
|
3156
|
+
),
|
|
3157
|
+
)
|
|
3158
|
+
|
|
3159
|
+
from utils.access_control import access_control
|
|
3160
|
+
if not await access_control.check_access(
|
|
3161
|
+
user_shortname=logged_in_user,
|
|
3162
|
+
space_name=_result.attributes['space_name'],
|
|
3163
|
+
subpath=_result.subpath,
|
|
3164
|
+
resource_type=_result.resource_type,
|
|
3165
|
+
action_type=core.ActionType.view,
|
|
3166
|
+
resource_is_active=_result.attributes['is_active'],
|
|
3167
|
+
resource_owner_shortname=_result.attributes['owner_shortname'],
|
|
3168
|
+
resource_owner_group=_result.attributes['owner_group_shortname'],
|
|
3169
|
+
entry_shortname=_result.shortname
|
|
3170
|
+
):
|
|
3171
|
+
raise api.Exception(
|
|
3172
|
+
status.HTTP_401_UNAUTHORIZED,
|
|
3173
|
+
api.Error(
|
|
3174
|
+
type="request",
|
|
3175
|
+
code=InternalErrorCode.NOT_ALLOWED,
|
|
3176
|
+
message="You don't have permission to this action [42]",
|
|
3177
|
+
)
|
|
3178
|
+
)
|
|
3179
|
+
|
|
3180
|
+
return _result
|
|
3181
|
+
|
|
3182
|
+
async def delete_space(self, space_name, record, owner_shortname):
|
|
3183
|
+
resource_obj = core.Meta.from_record(
|
|
3184
|
+
record=record, owner_shortname=owner_shortname
|
|
3185
|
+
)
|
|
3186
|
+
await self.delete(space_name, record.subpath, resource_obj, owner_shortname)
|
|
3187
|
+
os.system(f"rm -r {settings.spaces_folder}/{space_name}")
|
|
3188
|
+
|
|
3189
|
+
    async def get_last_updated_entry(
        self,
        space_name: str,
        schema_names: list,
        retrieve_json_payload: bool,
        logged_in_user: str,
    ):
        """Not implemented for this adapter; returns None (no-op stub)."""
        pass
|
|
3197
|
+
|
|
3198
|
+
async def get_group_users(self, group_name: str):
|
|
3199
|
+
async with self.get_session() as session:
|
|
3200
|
+
statement = select(Users.shortname).where(col(Users.groups).contains([group_name]))
|
|
3201
|
+
result = await session.execute(statement)
|
|
3202
|
+
shortnames = result.scalars().all()
|
|
3203
|
+
return shortnames
|
|
3204
|
+
|
|
3205
|
+
async def is_user_verified(self, user_shortname: str | None, identifier: str | None) -> bool:
|
|
3206
|
+
async with self.get_session() as session:
|
|
3207
|
+
statement = select(Users).where(Users.shortname == user_shortname)
|
|
3208
|
+
result = (await session.execute(statement)).one_or_none()
|
|
3209
|
+
|
|
3210
|
+
if result is None:
|
|
3211
|
+
return False
|
|
3212
|
+
user = Users.model_validate(result[0])
|
|
3213
|
+
|
|
3214
|
+
if identifier == "msisdn":
|
|
3215
|
+
return user.is_msisdn_verified
|
|
3216
|
+
if identifier == "email":
|
|
3217
|
+
return user.is_email_verified
|
|
3218
|
+
return False
|