dmart 1.4.40.post8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dmart/__init__.py +7 -0
- dmart/alembic/README +1 -0
- dmart/alembic/__init__.py +0 -0
- dmart/alembic/env.py +91 -0
- dmart/alembic/notes.txt +11 -0
- dmart/alembic/script.py.mako +28 -0
- dmart/alembic/scripts/__init__.py +0 -0
- dmart/alembic/scripts/calculate_checksums.py +77 -0
- dmart/alembic/scripts/migration_f7a4949eed19.py +28 -0
- dmart/alembic/versions/0f3d2b1a7c21_add_authz_materialized_views.py +87 -0
- dmart/alembic/versions/10d2041b94d4_last_checksum_history.py +62 -0
- dmart/alembic/versions/1cf4e1ee3cb8_ext_permission_with_filter_fields_values.py +33 -0
- dmart/alembic/versions/26bfe19b49d4_rm_failedloginattempts.py +42 -0
- dmart/alembic/versions/3c8bca2219cc_add_otp_table.py +38 -0
- dmart/alembic/versions/6675fd9dfe42_remove_unique_from_sessions_table.py +36 -0
- dmart/alembic/versions/71bc1df82e6a_adding_user_last_login_at.py +43 -0
- dmart/alembic/versions/74288ccbd3b5_initial.py +264 -0
- dmart/alembic/versions/7520a89a8467_rm_activesession_table.py +39 -0
- dmart/alembic/versions/848b623755a4_make_created_nd_updated_at_required.py +138 -0
- dmart/alembic/versions/8640dcbebf85_add_notes_to_users.py +32 -0
- dmart/alembic/versions/91c94250232a_adding_fk_on_owner_shortname.py +104 -0
- dmart/alembic/versions/98ecd6f56f9a_ext_meta_with_owner_group_shortname.py +66 -0
- dmart/alembic/versions/9aae9138c4ef_indexing_created_at_updated_at.py +80 -0
- dmart/alembic/versions/__init__.py +0 -0
- dmart/alembic/versions/b53f916b3f6d_json_to_jsonb.py +492 -0
- dmart/alembic/versions/eb5f1ec65156_adding_user_locked_to_device.py +36 -0
- dmart/alembic/versions/f7a4949eed19_adding_query_policies_to_meta.py +60 -0
- dmart/alembic.ini +117 -0
- dmart/api/__init__.py +0 -0
- dmart/api/info/__init__.py +0 -0
- dmart/api/info/router.py +109 -0
- dmart/api/managed/__init__.py +0 -0
- dmart/api/managed/router.py +1541 -0
- dmart/api/managed/utils.py +1879 -0
- dmart/api/public/__init__.py +0 -0
- dmart/api/public/router.py +758 -0
- dmart/api/qr/__init__.py +0 -0
- dmart/api/qr/router.py +108 -0
- dmart/api/user/__init__.py +0 -0
- dmart/api/user/model/__init__.py +0 -0
- dmart/api/user/model/errors.py +14 -0
- dmart/api/user/model/requests.py +165 -0
- dmart/api/user/model/responses.py +11 -0
- dmart/api/user/router.py +1413 -0
- dmart/api/user/service.py +270 -0
- dmart/bundler.py +52 -0
- dmart/cli.py +1133 -0
- dmart/config/__init__.py +0 -0
- dmart/config/channels.json +11 -0
- dmart/config/notification.json +17 -0
- dmart/config.env.sample +27 -0
- dmart/config.ini.sample +7 -0
- dmart/conftest.py +13 -0
- dmart/curl.sh +196 -0
- dmart/cxb/__init__.py +0 -0
- dmart/cxb/assets/@codemirror-Rn7_6DkE.js +10 -0
- dmart/cxb/assets/@edraj-CS4NwVbD.js +1 -0
- dmart/cxb/assets/@floating-ui-BwwcF-xh.js +1 -0
- dmart/cxb/assets/@formatjs-yKEsAtjs.js +1 -0
- dmart/cxb/assets/@fortawesome-DRW1UCdr.js +9 -0
- dmart/cxb/assets/@jsonquerylang-laKNoFFq.js +12 -0
- dmart/cxb/assets/@lezer-za4Q-8Ew.js +1 -0
- dmart/cxb/assets/@marijn-DXwl3gUT.js +1 -0
- dmart/cxb/assets/@popperjs-l0sNRNKZ.js +1 -0
- dmart/cxb/assets/@replit--ERk53eB.js +1 -0
- dmart/cxb/assets/@roxi-CGMFK4i8.js +6 -0
- dmart/cxb/assets/@typewriter-cCzskkIv.js +17 -0
- dmart/cxb/assets/@zerodevx-BlBZjKxu.js +1 -0
- dmart/cxb/assets/@zerodevx-CVEpe6WZ.css +1 -0
- dmart/cxb/assets/BreadCrumbLite-DAhOx38v.js +1 -0
- dmart/cxb/assets/EntryRenderer-CCqV8Rkg.js +32 -0
- dmart/cxb/assets/EntryRenderer-DXytdFp9.css +1 -0
- dmart/cxb/assets/ListView-BQelo7vZ.js +16 -0
- dmart/cxb/assets/ListView-U8of-_c-.css +1 -0
- dmart/cxb/assets/Prism--hMplq-p.js +3 -0
- dmart/cxb/assets/Prism-Uh6uStUw.css +1 -0
- dmart/cxb/assets/Table2Cols-BsbwicQm.js +1 -0
- dmart/cxb/assets/_..-BvT6vdHa.css +1 -0
- dmart/cxb/assets/_...404_-fuLH_rX9.js +2 -0
- dmart/cxb/assets/_...fallback_-Ba_NLmAE.js +1 -0
- dmart/cxb/assets/_module-3HrtKAWo.js +3 -0
- dmart/cxb/assets/_module-DFKFq0AM.js +4 -0
- dmart/cxb/assets/_module-Dgq0ZVtz.js +1 -0
- dmart/cxb/assets/ajv-Cpj98o6Y.js +1 -0
- dmart/cxb/assets/axios-CG2WSiiR.js +6 -0
- dmart/cxb/assets/clsx-B-dksMZM.js +1 -0
- dmart/cxb/assets/codemirror-wrapped-line-indent-DPhKvljI.js +1 -0
- dmart/cxb/assets/compare-C3AjiGFR.js +1 -0
- dmart/cxb/assets/compute-scroll-into-view-Bl8rNFhg.js +1 -0
- dmart/cxb/assets/consolite-DlCuI0F9.js +1 -0
- dmart/cxb/assets/crelt-C8TCjufn.js +1 -0
- dmart/cxb/assets/date-fns-l0sNRNKZ.js +1 -0
- dmart/cxb/assets/deepmerge-rn4rBaHU.js +1 -0
- dmart/cxb/assets/dmart_services-AL6-IdDE.js +1 -0
- dmart/cxb/assets/downloadFile-D08i0YDh.js +1 -0
- dmart/cxb/assets/easy-signal-BiPFIK3O.js +1 -0
- dmart/cxb/assets/esm-env-rsSWfq8L.js +1 -0
- dmart/cxb/assets/export-OF_rTiXu.js +1 -0
- dmart/cxb/assets/fast-deep-equal-l0sNRNKZ.js +1 -0
- dmart/cxb/assets/fast-diff-C-IidNf4.js +1 -0
- dmart/cxb/assets/fast-uri-l0sNRNKZ.js +1 -0
- dmart/cxb/assets/flowbite-svelte-BLvjb-sa.js +1 -0
- dmart/cxb/assets/flowbite-svelte-CD54FDqW.css +1 -0
- dmart/cxb/assets/flowbite-svelte-icons-BI8GVhw_.js +1 -0
- dmart/cxb/assets/github-slugger-CQ4oX9Ud.js +1 -0
- dmart/cxb/assets/global-igKv-1g9.js +1 -0
- dmart/cxb/assets/hookar-BMRD9G9H.js +1 -0
- dmart/cxb/assets/immutable-json-patch-DtRO2E_S.js +1 -0
- dmart/cxb/assets/import-1vE3gBat.js +1 -0
- dmart/cxb/assets/index-B-eTh-ZX.js +1 -0
- dmart/cxb/assets/index-BSsK-X71.js +1 -0
- dmart/cxb/assets/index-BVyxzKtH.js +1 -0
- dmart/cxb/assets/index-BdeNM69f.js +1 -0
- dmart/cxb/assets/index-CC-A1ipE.js +1 -0
- dmart/cxb/assets/index-CQohGiYB.js +1 -0
- dmart/cxb/assets/index-ChjnkpdZ.js +4 -0
- dmart/cxb/assets/index-DLP7csA4.js +1 -0
- dmart/cxb/assets/index-DTfhnhwd.js +1 -0
- dmart/cxb/assets/index-DdXRK7n9.js +2 -0
- dmart/cxb/assets/index-DtiCmB4o.js +1 -0
- dmart/cxb/assets/index-NBrXBlLA.css +2 -0
- dmart/cxb/assets/index-X1uNehO7.js +1 -0
- dmart/cxb/assets/index-nrQW6Nrr.js +1 -0
- dmart/cxb/assets/info-B986lRiM.js +1 -0
- dmart/cxb/assets/intl-messageformat-Dc5UU-HB.js +3 -0
- dmart/cxb/assets/jmespath-l0sNRNKZ.js +1 -0
- dmart/cxb/assets/json-schema-traverse-l0sNRNKZ.js +1 -0
- dmart/cxb/assets/json-source-map-DRgZidqy.js +5 -0
- dmart/cxb/assets/jsonpath-plus-l0sNRNKZ.js +1 -0
- dmart/cxb/assets/jsonrepair-B30Dx381.js +8 -0
- dmart/cxb/assets/lodash-es-DZVAA2ox.js +1 -0
- dmart/cxb/assets/marked-DKjyhwJX.js +56 -0
- dmart/cxb/assets/marked-gfm-heading-id-U5zO829x.js +2 -0
- dmart/cxb/assets/marked-mangle-CDMeiHC6.js +1 -0
- dmart/cxb/assets/memoize-one-BdPwpGay.js +1 -0
- dmart/cxb/assets/natural-compare-lite-Bg2Xcf-o.js +7 -0
- dmart/cxb/assets/pagination-svelte-D5CyoiE_.js +13 -0
- dmart/cxb/assets/pagination-svelte-v10nAbbM.css +1 -0
- dmart/cxb/assets/plantuml-encoder-C47mzt9T.js +1 -0
- dmart/cxb/assets/prismjs-DTUiLGJu.js +9 -0
- dmart/cxb/assets/profile-BUf-tKMe.js +1 -0
- dmart/cxb/assets/query-CNmXTsgf.js +1 -0
- dmart/cxb/assets/queryHelpers-C9iBWwqe.js +1 -0
- dmart/cxb/assets/scroll-into-view-if-needed-KR58zyjF.js +1 -0
- dmart/cxb/assets/spaces-0oyGvpii.js +1 -0
- dmart/cxb/assets/style-mod-Bs6eFhZE.js +3 -0
- dmart/cxb/assets/svelte-B2XmcTi_.js +4 -0
- dmart/cxb/assets/svelte-awesome-COLlx0DN.css +1 -0
- dmart/cxb/assets/svelte-awesome-DhnMA6Q_.js +1 -0
- dmart/cxb/assets/svelte-datatables-net-CY7LBj6I.js +1 -0
- dmart/cxb/assets/svelte-floating-ui-BlS3sOAQ.js +1 -0
- dmart/cxb/assets/svelte-i18n-CT2KkQaN.js +3 -0
- dmart/cxb/assets/svelte-jsoneditor-BzfX6Usi.css +1 -0
- dmart/cxb/assets/svelte-jsoneditor-CUGSvWId.js +25 -0
- dmart/cxb/assets/svelte-select-CegQKzqH.css +1 -0
- dmart/cxb/assets/svelte-select-CjHAt_85.js +6 -0
- dmart/cxb/assets/tailwind-merge-CJvxXMcu.js +1 -0
- dmart/cxb/assets/tailwind-variants-Cj20BoQ3.js +1 -0
- dmart/cxb/assets/toast-B9WDyfyI.js +1 -0
- dmart/cxb/assets/tslib-pJfR_DrR.js +1 -0
- dmart/cxb/assets/typewriter-editor-DkTVIJdm.js +25 -0
- dmart/cxb/assets/user-DeK_NB5v.js +1 -0
- dmart/cxb/assets/vanilla-picker-l5rcX3cq.js +8 -0
- dmart/cxb/assets/w3c-keyname-Vcq4gwWv.js +1 -0
- dmart/cxb/config.json +11 -0
- dmart/cxb/config.sample.json +11 -0
- dmart/cxb/favicon.ico +0 -0
- dmart/cxb/favicon.png +0 -0
- dmart/cxb/index.html +28 -0
- dmart/data_adapters/__init__.py +0 -0
- dmart/data_adapters/adapter.py +16 -0
- dmart/data_adapters/base_data_adapter.py +467 -0
- dmart/data_adapters/file/__init__.py +0 -0
- dmart/data_adapters/file/adapter.py +2043 -0
- dmart/data_adapters/file/adapter_helpers.py +1013 -0
- dmart/data_adapters/file/archive.py +150 -0
- dmart/data_adapters/file/create_index.py +331 -0
- dmart/data_adapters/file/create_users_folders.py +52 -0
- dmart/data_adapters/file/custom_validations.py +68 -0
- dmart/data_adapters/file/drop_index.py +40 -0
- dmart/data_adapters/file/health_check.py +560 -0
- dmart/data_adapters/file/redis_services.py +1110 -0
- dmart/data_adapters/helpers.py +27 -0
- dmart/data_adapters/sql/__init__.py +0 -0
- dmart/data_adapters/sql/adapter.py +3218 -0
- dmart/data_adapters/sql/adapter_helpers.py +491 -0
- dmart/data_adapters/sql/create_tables.py +451 -0
- dmart/data_adapters/sql/create_users_folders.py +53 -0
- dmart/data_adapters/sql/db_to_json_migration.py +485 -0
- dmart/data_adapters/sql/health_check_sql.py +232 -0
- dmart/data_adapters/sql/json_to_db_migration.py +454 -0
- dmart/data_adapters/sql/update_query_policies.py +101 -0
- dmart/data_generator.py +81 -0
- dmart/dmart.py +761 -0
- dmart/get_settings.py +7 -0
- dmart/hypercorn_config.toml +3 -0
- dmart/info.json +1 -0
- dmart/languages/__init__.py +0 -0
- dmart/languages/arabic.json +15 -0
- dmart/languages/english.json +16 -0
- dmart/languages/kurdish.json +14 -0
- dmart/languages/loader.py +12 -0
- dmart/login_creds.sh +7 -0
- dmart/login_creds.sh.sample +7 -0
- dmart/main.py +563 -0
- dmart/manifest.sh +12 -0
- dmart/migrate.py +24 -0
- dmart/models/__init__.py +0 -0
- dmart/models/api.py +203 -0
- dmart/models/core.py +597 -0
- dmart/models/enums.py +255 -0
- dmart/password_gen.py +8 -0
- dmart/plugins/__init__.py +0 -0
- dmart/plugins/action_log/__init__.py +0 -0
- dmart/plugins/action_log/config.json +13 -0
- dmart/plugins/action_log/plugin.py +121 -0
- dmart/plugins/admin_notification_sender/__init__.py +0 -0
- dmart/plugins/admin_notification_sender/config.json +13 -0
- dmart/plugins/admin_notification_sender/plugin.py +124 -0
- dmart/plugins/ldap_manager/__init__.py +0 -0
- dmart/plugins/ldap_manager/config.json +12 -0
- dmart/plugins/ldap_manager/dmart.schema +146 -0
- dmart/plugins/ldap_manager/plugin.py +100 -0
- dmart/plugins/ldap_manager/slapd.conf +53 -0
- dmart/plugins/local_notification/__init__.py +0 -0
- dmart/plugins/local_notification/config.json +13 -0
- dmart/plugins/local_notification/plugin.py +123 -0
- dmart/plugins/realtime_updates_notifier/__init__.py +0 -0
- dmart/plugins/realtime_updates_notifier/config.json +12 -0
- dmart/plugins/realtime_updates_notifier/plugin.py +58 -0
- dmart/plugins/redis_db_update/__init__.py +0 -0
- dmart/plugins/redis_db_update/config.json +13 -0
- dmart/plugins/redis_db_update/plugin.py +188 -0
- dmart/plugins/resource_folders_creation/__init__.py +0 -0
- dmart/plugins/resource_folders_creation/config.json +12 -0
- dmart/plugins/resource_folders_creation/plugin.py +81 -0
- dmart/plugins/system_notification_sender/__init__.py +0 -0
- dmart/plugins/system_notification_sender/config.json +13 -0
- dmart/plugins/system_notification_sender/plugin.py +188 -0
- dmart/plugins/update_access_controls/__init__.py +0 -0
- dmart/plugins/update_access_controls/config.json +12 -0
- dmart/plugins/update_access_controls/plugin.py +9 -0
- dmart/publish.sh +57 -0
- dmart/pylint.sh +16 -0
- dmart/pyrightconfig.json +7 -0
- dmart/redis_connections.sh +13 -0
- dmart/reload.sh +56 -0
- dmart/run.sh +3 -0
- dmart/run_notification_campaign.py +85 -0
- dmart/sample/spaces/applications/.dm/meta.space.json +30 -0
- dmart/sample/spaces/applications/api/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/api/.dm/query_all_applications/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/.dm/test_by_saad/attachments.media/meta.warframe.json +1 -0
- dmart/sample/spaces/applications/api/.dm/test_by_saad/attachments.media/warframe.png +0 -0
- dmart/sample/spaces/applications/api/.dm/test_by_saad/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/.dm/user_profile/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/create_log/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/create_public_logs/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/query_all_translated_data/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/query_logs/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/query_translated_enums/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/query_translated_others/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/.dm/query_translated_resolution/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/applications/create_log.json +1 -0
- dmart/sample/spaces/applications/api/applications/create_public_logs.json +1 -0
- dmart/sample/spaces/applications/api/applications/query_all_translated_data.json +1 -0
- dmart/sample/spaces/applications/api/applications/query_logs.json +1 -0
- dmart/sample/spaces/applications/api/applications/query_translated_enums.json +1 -0
- dmart/sample/spaces/applications/api/applications/query_translated_others.json +1 -0
- dmart/sample/spaces/applications/api/applications/query_translated_resolution.json +1 -0
- dmart/sample/spaces/applications/api/applications.json +1 -0
- dmart/sample/spaces/applications/api/management/.dm/create_subaccount/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/management/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/api/management/.dm/update_password/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/management/create_subaccount.json +53 -0
- dmart/sample/spaces/applications/api/management/update_password.json +1 -0
- dmart/sample/spaces/applications/api/management.json +1 -0
- dmart/sample/spaces/applications/api/query_all_applications.json +15 -0
- dmart/sample/spaces/applications/api/test_by_saad.json +1 -0
- dmart/sample/spaces/applications/api/user/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/api/user/.dm/test_by_saad/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/user/.dm/user_profile/meta.content.json +1 -0
- dmart/sample/spaces/applications/api/user/test_by_saad.json +1 -0
- dmart/sample/spaces/applications/api/user/user_profile.json +1 -0
- dmart/sample/spaces/applications/api/user_profile.json +1 -0
- dmart/sample/spaces/applications/api.json +1 -0
- dmart/sample/spaces/applications/collections/.dm/meta.folder.json +19 -0
- dmart/sample/spaces/applications/collections.json +1 -0
- dmart/sample/spaces/applications/configurations/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/configurations/time_out.json +1 -0
- dmart/sample/spaces/applications/configurations.json +19 -0
- dmart/sample/spaces/applications/errors.json +1 -0
- dmart/sample/spaces/applications/logs/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/logs.json +1 -0
- dmart/sample/spaces/applications/queries/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/queries/.dm/order/meta.content.json +1 -0
- dmart/sample/spaces/applications/queries/order.json +1 -0
- dmart/sample/spaces/applications/queries.json +1 -0
- dmart/sample/spaces/applications/schema/.dm/api/meta.schema.json +1 -0
- dmart/sample/spaces/applications/schema/.dm/configuration/meta.schema.json +1 -0
- dmart/sample/spaces/applications/schema/.dm/error/meta.schema.json +1 -0
- dmart/sample/spaces/applications/schema/.dm/log/meta.schema.json +1 -0
- dmart/sample/spaces/applications/schema/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/schema/.dm/query/meta.schema.json +16 -0
- dmart/sample/spaces/applications/schema/.dm/translation/meta.schema.json +1 -0
- dmart/sample/spaces/applications/schema/api.json +28 -0
- dmart/sample/spaces/applications/schema/configuration.json +1 -0
- dmart/sample/spaces/applications/schema/error.json +43 -0
- dmart/sample/spaces/applications/schema/log.json +1 -0
- dmart/sample/spaces/applications/schema/query.json +118 -0
- dmart/sample/spaces/applications/schema/translation.json +26 -0
- dmart/sample/spaces/applications/schema.json +1 -0
- dmart/sample/spaces/applications/translations/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/applications/translations.json +1 -0
- dmart/sample/spaces/archive/.dm/meta.space.json +27 -0
- dmart/sample/spaces/custom_plugins/dummy/__pycache__/plugin.cpython-314.pyc +0 -0
- dmart/sample/spaces/custom_plugins/dummy/config.json +28 -0
- dmart/sample/spaces/custom_plugins/dummy/plugin.py +6 -0
- dmart/sample/spaces/custom_plugins/missed_entry/config.json +12 -0
- dmart/sample/spaces/custom_plugins/missed_entry/plugin.py +119 -0
- dmart/sample/spaces/custom_plugins/own_changed_notification/__pycache__/plugin.cpython-314.pyc +0 -0
- dmart/sample/spaces/custom_plugins/own_changed_notification/config.json +12 -0
- dmart/sample/spaces/custom_plugins/own_changed_notification/plugin.py +65 -0
- dmart/sample/spaces/custom_plugins/reports_stats/config.json +14 -0
- dmart/sample/spaces/custom_plugins/reports_stats/plugin.py +82 -0
- dmart/sample/spaces/custom_plugins/system_notification_sender/config.json +22 -0
- dmart/sample/spaces/custom_plugins/system_notification_sender/notification.py +268 -0
- dmart/sample/spaces/custom_plugins/system_notification_sender/plugin.py +98 -0
- dmart/sample/spaces/management/.dm/events.jsonl +32 -0
- dmart/sample/spaces/management/.dm/meta.space.json +48 -0
- dmart/sample/spaces/management/.dm/notifications/attachments.view.json/admin.json +36 -0
- dmart/sample/spaces/management/.dm/notifications/attachments.view.json/meta.admin.json +1 -0
- dmart/sample/spaces/management/.dm/notifications/attachments.view.json/meta.system.json +1 -0
- dmart/sample/spaces/management/.dm/notifications/attachments.view.json/system.json +32 -0
- dmart/sample/spaces/management/collections/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/collections.json +1 -0
- dmart/sample/spaces/management/groups/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/groups.json +1 -0
- dmart/sample/spaces/management/health_check/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/health_check.json +1 -0
- dmart/sample/spaces/management/notifications/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/notifications/admin/.dm/meta.folder.json +9 -0
- dmart/sample/spaces/management/notifications/system/.dm/meta.folder.json +9 -0
- dmart/sample/spaces/management/notifications.json +1 -0
- dmart/sample/spaces/management/permissions/.dm/access_applications/meta.permission.json +31 -0
- dmart/sample/spaces/management/permissions/.dm/access_applications_world/meta.permission.json +31 -0
- dmart/sample/spaces/management/permissions/.dm/access_messages/meta.permission.json +23 -0
- dmart/sample/spaces/management/permissions/.dm/access_personal/meta.permission.json +40 -0
- dmart/sample/spaces/management/permissions/.dm/access_protected/meta.permission.json +33 -0
- dmart/sample/spaces/management/permissions/.dm/access_public/meta.permission.json +24 -0
- dmart/sample/spaces/management/permissions/.dm/browse_all_folders/meta.permission.json +23 -0
- dmart/sample/spaces/management/permissions/.dm/create_log/meta.permission.json +24 -0
- dmart/sample/spaces/management/permissions/.dm/interviewer/meta.permission.json +1 -0
- dmart/sample/spaces/management/permissions/.dm/manage_applications/meta.permission.json +1 -0
- dmart/sample/spaces/management/permissions/.dm/manage_debug/meta.permission.json +25 -0
- dmart/sample/spaces/management/permissions/.dm/manage_spaces/meta.permission.json +24 -0
- dmart/sample/spaces/management/permissions/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/permissions/.dm/rules_management_default/meta.permission.json +32 -0
- dmart/sample/spaces/management/permissions/.dm/super_manager/meta.permission.json +52 -0
- dmart/sample/spaces/management/permissions/.dm/view_activity_log/meta.permission.json +26 -0
- dmart/sample/spaces/management/permissions/.dm/view_collections/meta.permission.json +29 -0
- dmart/sample/spaces/management/permissions/.dm/view_logs/meta.permission.json +30 -0
- dmart/sample/spaces/management/permissions/.dm/view_roles/meta.permission.json +29 -0
- dmart/sample/spaces/management/permissions/.dm/view_users/meta.permission.json +25 -0
- dmart/sample/spaces/management/permissions/.dm/view_world/meta.permission.json +31 -0
- dmart/sample/spaces/management/permissions/.dm/world/meta.permission.json +35 -0
- dmart/sample/spaces/management/permissions.json +1 -0
- dmart/sample/spaces/management/requests.json +1 -0
- dmart/sample/spaces/management/roles/.dm/dummy/meta.role.json +12 -0
- dmart/sample/spaces/management/roles/.dm/logged_in/meta.role.json +18 -0
- dmart/sample/spaces/management/roles/.dm/manager/meta.role.json +13 -0
- dmart/sample/spaces/management/roles/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/roles/.dm/moderator/meta.role.json +13 -0
- dmart/sample/spaces/management/roles/.dm/super_admin/meta.role.json +14 -0
- dmart/sample/spaces/management/roles/.dm/test_role/meta.role.json +13 -0
- dmart/sample/spaces/management/roles/.dm/world/meta.role.json +15 -0
- dmart/sample/spaces/management/roles.json +1 -0
- dmart/sample/spaces/management/schema/.dm/admin_notification_request/attachments.media/meta.ui_schema.json +10 -0
- dmart/sample/spaces/management/schema/.dm/admin_notification_request/attachments.media/ui_schema.json +32 -0
- dmart/sample/spaces/management/schema/.dm/admin_notification_request/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/.dm/api/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/.dm/folder_rendering/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/.dm/health_check/meta.schema.json +17 -0
- dmart/sample/spaces/management/schema/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/schema/.dm/meta_schema/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/.dm/metafile/meta.schema.json +14 -0
- dmart/sample/spaces/management/schema/.dm/notification/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/.dm/system_notification_request/attachments.media/meta.ui_schema.json +10 -0
- dmart/sample/spaces/management/schema/.dm/system_notification_request/attachments.media/ui_schema.json +32 -0
- dmart/sample/spaces/management/schema/.dm/system_notification_request/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/.dm/view/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/.dm/workflow/meta.schema.json +1 -0
- dmart/sample/spaces/management/schema/admin_notification_request.json +89 -0
- dmart/sample/spaces/management/schema/api.json +1 -0
- dmart/sample/spaces/management/schema/folder_rendering.json +238 -0
- dmart/sample/spaces/management/schema/health_check.json +8 -0
- dmart/sample/spaces/management/schema/meta_schema.json +74 -0
- dmart/sample/spaces/management/schema/metafile.json +153 -0
- dmart/sample/spaces/management/schema/notification.json +28 -0
- dmart/sample/spaces/management/schema/system_notification_request.json +57 -0
- dmart/sample/spaces/management/schema/view.json +23 -0
- dmart/sample/spaces/management/schema/workflow.json +87 -0
- dmart/sample/spaces/management/schema.json +1 -0
- dmart/sample/spaces/management/users/.dm/alibaba/meta.user.json +23 -0
- dmart/sample/spaces/management/users/.dm/anonymous/meta.user.json +18 -0
- dmart/sample/spaces/management/users/.dm/dmart/meta.user.json +26 -0
- dmart/sample/spaces/management/users/.dm/meta.folder.json +14 -0
- dmart/sample/spaces/management/workflows/.dm/channel/meta.content.json +1 -0
- dmart/sample/spaces/management/workflows/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/management/workflows/channel.json +148 -0
- dmart/sample/spaces/management/workflows.json +1 -0
- dmart/sample/spaces/maqola/.dm/meta.space.json +33 -0
- dmart/sample/spaces/personal/.dm/meta.space.json +24 -0
- dmart/sample/spaces/personal/people/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/personal/people/dmart/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/personal/people/dmart/messages/.dm/0b5f7e7f/meta.content.json +1 -0
- dmart/sample/spaces/personal/people/dmart/messages/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/personal/people/dmart/messages/.dm/mytest/meta.content.json +1 -0
- dmart/sample/spaces/personal/people/dmart/messages/0b5f7e7f.json +1 -0
- dmart/sample/spaces/personal/people/dmart/messages/mytest.json +1 -0
- dmart/sample/spaces/personal/people/dmart/notifications/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/personal/people/dmart/private/.dm/inner/meta.content.json +1 -0
- dmart/sample/spaces/personal/people/dmart/private/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/personal/people/dmart/private/inner.json +1 -0
- dmart/sample/spaces/personal/people/dmart/protected/.dm/avatar/meta.content.json +1 -0
- dmart/sample/spaces/personal/people/dmart/protected/.dm/meta.folder.json +1 -0
- dmart/sample/spaces/personal/people/dmart/protected/avatar.png +0 -0
- dmart/sample/spaces/personal/people/dmart/public/.dm/meta.folder.json +1 -0
- dmart/sample/test/.gitignore +2 -0
- dmart/sample/test/createcontent.json +9 -0
- dmart/sample/test/createmedia.json +9 -0
- dmart/sample/test/createmedia_entry.json +6 -0
- dmart/sample/test/createschema.json +8 -0
- dmart/sample/test/createschemawork.json +11 -0
- dmart/sample/test/createticket.json +13 -0
- dmart/sample/test/data.json +4 -0
- dmart/sample/test/deletecontent.json +12 -0
- dmart/sample/test/logo.jpeg +0 -0
- dmart/sample/test/my.jpg +0 -0
- dmart/sample/test/myticket.json +23 -0
- dmart/sample/test/resources.csv +12 -0
- dmart/sample/test/schema.json +16 -0
- dmart/sample/test/temp.json +1 -0
- dmart/sample/test/test.dmart +45 -0
- dmart/sample/test/ticket_schema.json +23 -0
- dmart/sample/test/ticket_workflow.json +85 -0
- dmart/sample/test/ticketbody.json +4 -0
- dmart/sample/test/ticketcontent.json +14 -0
- dmart/sample/test/updatecontent.json +20 -0
- dmart/sample/test/workflow_schema.json +68 -0
- dmart/scheduled_notification_handler.py +121 -0
- dmart/schema_migration.py +208 -0
- dmart/schema_modulate.py +192 -0
- dmart/set_admin_passwd.py +75 -0
- dmart/sync.py +202 -0
- dmart/test_utils.py +34 -0
- dmart/utils/__init__.py +0 -0
- dmart/utils/access_control.py +306 -0
- dmart/utils/async_request.py +8 -0
- dmart/utils/exporter.py +309 -0
- dmart/utils/firebase_notifier.py +57 -0
- dmart/utils/generate_email.py +37 -0
- dmart/utils/helpers.py +352 -0
- dmart/utils/hypercorn_config.py +12 -0
- dmart/utils/internal_error_code.py +60 -0
- dmart/utils/jwt.py +124 -0
- dmart/utils/logger.py +167 -0
- dmart/utils/middleware.py +99 -0
- dmart/utils/notification.py +75 -0
- dmart/utils/password_hashing.py +16 -0
- dmart/utils/plugin_manager.py +202 -0
- dmart/utils/query_policies_helper.py +128 -0
- dmart/utils/regex.py +44 -0
- dmart/utils/repository.py +529 -0
- dmart/utils/router_helper.py +19 -0
- dmart/utils/settings.py +212 -0
- dmart/utils/sms_notifier.py +21 -0
- dmart/utils/social_sso.py +67 -0
- dmart/utils/templates/activation.html.j2 +26 -0
- dmart/utils/templates/reminder.html.j2 +17 -0
- dmart/utils/ticket_sys_utils.py +203 -0
- dmart/utils/web_notifier.py +29 -0
- dmart/websocket.py +231 -0
- dmart-1.4.40.post8.dist-info/METADATA +75 -0
- dmart-1.4.40.post8.dist-info/RECORD +489 -0
- dmart-1.4.40.post8.dist-info/WHEEL +5 -0
- dmart-1.4.40.post8.dist-info/entry_points.txt +2 -0
- dmart-1.4.40.post8.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,2043 @@
|
|
|
1
|
+
import io
|
|
2
|
+
import shutil
|
|
3
|
+
from copy import copy
|
|
4
|
+
from shutil import copy2 as copy_file
|
|
5
|
+
from typing import Type, Any, Tuple
|
|
6
|
+
import os
|
|
7
|
+
import sys
|
|
8
|
+
from sys import modules as sys_modules
|
|
9
|
+
from fastapi.logger import logger
|
|
10
|
+
from redis.commands.search.field import TextField
|
|
11
|
+
from redis.commands.search.index_definition import IndexDefinition, IndexType
|
|
12
|
+
from redis.commands.search.query import Query
|
|
13
|
+
from datetime import datetime
|
|
14
|
+
import aiofiles
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
from data_adapters.file.adapter_helpers import serve_query_space, serve_query_search, serve_query_subpath, \
|
|
17
|
+
serve_query_counters, serve_query_tags, serve_query_random, serve_query_history, serve_query_events, \
|
|
18
|
+
serve_query_aggregation, get_record_from_redis_doc
|
|
19
|
+
from data_adapters.helpers import trans_magic_words
|
|
20
|
+
from models.api import Exception as API_Exception, Error as API_Error
|
|
21
|
+
import models.core as core
|
|
22
|
+
from utils import regex
|
|
23
|
+
from data_adapters.file.custom_validations import get_schema_path
|
|
24
|
+
from data_adapters.base_data_adapter import BaseDataAdapter, MetaChild
|
|
25
|
+
from models.enums import ContentType, ResourceType, LockAction
|
|
26
|
+
|
|
27
|
+
from utils.helpers import arr_remove_common, read_jsonl_file, snake_case, camel_case, flatten_list_of_dicts_in_dict, \
|
|
28
|
+
flatten_dict, resolve_schema_references
|
|
29
|
+
from utils.internal_error_code import InternalErrorCode
|
|
30
|
+
from utils.middleware import get_request_data
|
|
31
|
+
from data_adapters.file.redis_services import RedisServices
|
|
32
|
+
from utils.password_hashing import hash_password
|
|
33
|
+
from utils.plugin_manager import plugin_manager
|
|
34
|
+
from utils.regex import FILE_PATTERN, FOLDER_PATTERN, SPACES_PATTERN
|
|
35
|
+
from utils.settings import settings
|
|
36
|
+
from jsonschema import Draft7Validator
|
|
37
|
+
from starlette.datastructures import UploadFile
|
|
38
|
+
from pathlib import Path as FSPath
|
|
39
|
+
import models.api as api
|
|
40
|
+
from fastapi import status
|
|
41
|
+
import json
|
|
42
|
+
import subprocess
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def sort_alteration(attachments_dict, attachments_path):
    """Sort alteration attachments chronologically, in place.

    For the entry of *attachments_dict* whose key is
    ``ResourceType.alteration``, replace its list of attachment records with
    the same records sorted ascending by ``attributes["created_at"]``.
    Other keys are left untouched.

    :param attachments_dict: mapping of attachment kind -> list of attachment
        records (records are expected to expose an ``attributes`` dict —
        presumably ``core.Record``-like objects; confirm against callers).
    :param attachments_path: filesystem path of the attachments folder,
        used only to build the error-log message.
    """
    for attachment_name, attachments in attachments_dict.items():
        try:
            if attachment_name == ResourceType.alteration:
                attachments_dict[attachment_name] = sorted(
                    attachments, key=lambda d: d.attributes["created_at"]
                )
        except Exception as e:
            # Best-effort: a malformed record (e.g. missing "created_at")
            # is logged and skipped rather than aborting the whole scan.
            logger.error(
                f"Invalid attachment entry:{attachments_path / attachment_name}.\
                Error: {e.args}"
            )
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def is_file_check(retrieve_json_payload, resource_obj, resource_record_obj, attachment_entry):
    """Tell whether an attachment's JSON payload file should be loaded.

    Truthy only when the caller asked for JSON payloads, both resource
    objects are present, the resource carries a JSON-typed payload, and the
    payload body actually exists as a file under the attachment's path.

    Short-circuits exactly like an ``and`` chain, so a falsy argument is
    returned as-is (``None``, ``False``, ``0`` …) rather than coerced to bool.
    """
    # First gate: all preconditions that don't touch the filesystem.
    payload_is_json = (
        retrieve_json_payload
        and resource_obj
        and resource_record_obj
        and resource_obj.payload
        and resource_obj.payload.content_type
        and resource_obj.payload.content_type == ContentType.json
    )
    # Second gate: the payload body must exist on disk.
    return payload_is_json and Path(
        f"{attachment_entry.path}/{resource_obj.payload.body}"
    ).is_file()
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def locator_query_path_sub_folder(locators, query, subpath_iterator, total):
    """Scan one ``.dm`` shortname directory, appending entry Locators that pass
    the query's type/shortname filters and pagination window.

    Returns the (possibly grown) ``locators`` list and the updated running
    ``total`` of matching entries (counted even when outside the page).
    """
    for candidate in subpath_iterator:
        m = FILE_PATTERN.search(str(candidate.path))
        if m is None or not candidate.is_file():
            continue

        total += 1
        # Pagination: keep counting, but collect only inside [offset, offset+limit).
        if len(locators) >= query.limit or total < query.offset:
            continue

        name = m.group(1)
        rtype = m.group(2).lower()
        if query.filter_types and ResourceType(rtype) not in query.filter_types:
            continue
        if query.filter_shortnames and name not in query.filter_shortnames:
            continue

        locators.append(
            core.Locator(
                space_name=query.space_name,
                subpath=query.subpath,
                shortname=name,
                type=ResourceType(rtype),
            )
        )
    return locators, total
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def locator_query_sub_folder(locators, query, subfolders_iterator, total):
    """Scan sibling directories, appending folder Locators that pass the
    query's shortname filter and pagination window.

    A directory only counts when its ``.dm/meta.folder.json`` exists and the
    path matches FOLDER_PATTERN. Returns (locators, updated total).
    """
    for candidate in subfolders_iterator:
        if not candidate.is_dir():
            continue

        meta_file = Path(candidate.path + "/.dm/meta.folder.json")
        m = FOLDER_PATTERN.search(str(meta_file))
        if m is None or not meta_file.is_file():
            continue

        total += 1
        # Pagination: count everything, collect only inside the requested page.
        if len(locators) >= query.limit or total < query.offset:
            continue

        name = m.group(1)
        if query.filter_shortnames and name not in query.filter_shortnames:
            continue

        locators.append(
            core.Locator(
                space_name=query.space_name,
                subpath=query.subpath,
                shortname=name,
                type=core.ResourceType.folder,
            )
        )

    return locators, total
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
class FileAdapter(BaseDataAdapter):
|
|
144
|
+
async def test_connection(self):
    """Probe Redis with a cheap read; abort the process (exit 127) on failure.

    Called at startup so the service fails fast instead of limping along
    without its backing store.
    """
    try:
        async with RedisServices() as redis:
            await redis.get_doc_by_id("spaces")
    except Exception as err:
        print("[!FATAL]", err)
        sys.exit(127)
|
|
151
|
+
|
|
152
|
+
def locators_query(self, query: api.Query) -> tuple[int, list[core.Locator]]:
    """Resolve a subpath query into lightweight Locator references.

    Only ``subpath`` queries are handled; any other query type returns
    ``(0, [])``. Scans the subpath's ``.dm`` metadata tree for entries, then
    the subpath itself for sub-folders, honoring query limit/offset.

    Returns:
        (total number of matches seen, locators collected within the page)
    """
    locators: list[core.Locator] = []
    total: int = 0
    if query.type != api.QueryType.subpath:
        return total, locators
    path = (
        settings.spaces_folder
        / query.space_name
        / query.subpath
    )

    if query.include_fields is None:
        query.include_fields = []

    # Get all matching entries: each shortname has its own dir under .dm
    meta_path = path / ".dm"
    if not meta_path.is_dir():
        return total, locators

    path_iterator = os.scandir(meta_path)
    for entry in path_iterator:
        if not entry.is_dir():
            continue

        subpath_iterator = os.scandir(entry)
        locators, total = locator_query_path_sub_folder(locators, query, subpath_iterator, total)

    # Get all matching sub folders (folders are stored beside .dm, not inside it)
    subfolders_iterator = os.scandir(path)
    locators, total = locator_query_sub_folder(locators, query, subfolders_iterator, total)

    return total, locators
|
|
184
|
+
|
|
185
|
+
def folder_path(
    self,
    space_name: str,
    subpath: str,
    shortname: str,
):
    """Return ``<spaces_folder>/<space>/<subpath>/<shortname>`` as a plain string."""
    return "/".join([str(settings.spaces_folder), space_name, subpath, shortname])
|
|
192
|
+
|
|
193
|
+
async def otp_created_since(self, key: str) -> int | None:
    """Return how many seconds ago the OTP at *key* was stored, or None.

    Derived from the remaining Redis TTL: elapsed = configured ttl - remaining.
    Returns None when the reply is not an int, or when redis reports a
    sentinel TTL (-2 = key does not exist, -1 = key has no expiry) — in those
    cases there is no meaningful "created since" value to compute.
    """
    async with RedisServices() as redis_services:
        ttl = await redis_services.ttl(key)
        # Negative TTLs are redis sentinels, not remaining time; previously
        # they leaked through as otp_token_ttl + 1/2 seconds. Also dropped a
        # leftover debug print(ttl).
        if not isinstance(ttl, int) or ttl < 0:
            return None
        return settings.otp_token_ttl - ttl
|
|
200
|
+
|
|
201
|
+
async def save_otp(
    self,
    key: str,
    otp: str,
):
    """Store *otp* under *key*, expiring after the configured OTP TTL."""
    async with RedisServices() as redis:
        await redis.set(key, otp, settings.otp_token_ttl)
|
|
208
|
+
|
|
209
|
+
async def get_otp(
    self,
    key: str,
):
    """Fetch the OTP stored under *key* (None/empty when absent or expired)."""
    async with RedisServices() as redis:
        return await redis.get_content_by_id(key)
|
|
215
|
+
|
|
216
|
+
async def delete_otp(self, key: str):
    """Discard the OTP stored under *key*, if any."""
    async with RedisServices() as redis:
        await redis.del_keys([key])
|
|
219
|
+
|
|
220
|
+
def metapath(
    self,
    space_name: str,
    subpath: str,
    shortname: str,
    class_type: Type[MetaChild],
    schema_shortname: str | None = None,
) -> tuple[Path, str]:
    """Construct the full path of the meta file.

    Returns (directory, filename) for the ``.dm`` metadata of an entry,
    chosen by resource class. Branch order matters: Folder and Space are
    checked before the generic Attachment/History/other cases.

    NOTE(review): schema_shortname is accepted but currently unused here
    (the line that used it is commented out) — kept for signature parity
    with payload_path.
    """
    path = settings.spaces_folder / space_name

    filename = ""
    # A leading "/" would make the join absolute; re-anchor it as relative.
    if subpath[0] == "/":
        subpath = f".{subpath}"
    if issubclass(class_type, core.Folder):
        # Folder meta lives inside the folder's own .dm directory.
        path = path / subpath / shortname / ".dm"
        filename = f"meta.{class_type.__name__.lower()}.json"
    elif issubclass(class_type, core.Space):
        # Space meta sits at the space root; shortname/subpath are ignored.
        path = settings.spaces_folder / space_name / ".dm"
        filename = "meta.space.json"
    elif issubclass(class_type, core.Attachment):
        # Attachments hang off their parent entry:
        #   <parent_subpath>/.dm/<parent>/attachments.<type>/meta.<shortname>.json
        [parent_subpath, parent_name] = subpath.rsplit("/", 1)
        # schema_shortname = "." + schema_shortname if schema_shortname else ""
        attachment_folder = (
            f"{parent_name}/attachments.{class_type.__name__.lower()}"
        )
        path = path / parent_subpath / ".dm" / attachment_folder
        filename = f"meta.{shortname}.json"
    elif issubclass(class_type, core.History):
        # History records live under <parent>/history, one file per shortname.
        [parent_subpath, parent_name] = subpath.rsplit("/", 1)
        path = path / parent_subpath / ".dm" / f"{parent_name}/history"
        filename = f"{shortname}.json"
    else:
        # Generic entry: per-shortname directory under the subpath's .dm.
        path = path / subpath / ".dm" / shortname
        filename = f"meta.{snake_case(class_type.__name__)}.json"
    return path, filename
|
|
256
|
+
|
|
257
|
+
def payload_path(
    self,
    space_name: str,
    subpath: str,
    class_type: Type[MetaChild],
    schema_shortname: str | None = None,
) -> Path:
    """Construct the directory that holds a resource's payload file.

    Non-attachments keep their payload directly under the subpath;
    attachments keep it under the parent entry's ``.dm`` tree, with the
    optional schema shortname folded into the folder name.
    """
    base = settings.spaces_folder / space_name

    # Re-anchor absolute-looking subpaths as relative to the space.
    if subpath[0] == "/":
        subpath = f".{subpath}"

    if not issubclass(class_type, core.Attachment):
        return base / subpath

    parent_subpath, parent_name = subpath.rsplit("/", 1)
    schema_shortname = "." + schema_shortname if schema_shortname else ""
    attachment_folder = (
        f"{parent_name}/attachments{schema_shortname}.{class_type.__name__.lower()}"
    )
    return base / parent_subpath / ".dm" / attachment_folder
|
|
279
|
+
|
|
280
|
+
async def load_or_none(self,
                       space_name: str,
                       subpath: str,
                       shortname: str,
                       class_type: Type[MetaChild],
                       user_shortname: str | None = None,
                       schema_shortname: str | None = None
                       ) -> MetaChild | None:  # type: ignore
    """Like ``load`` but swallows every failure and returns None instead."""
    try:
        return await self.load(
            space_name, subpath, shortname, class_type, user_shortname, schema_shortname
        )
    except Exception:
        return None
|
|
293
|
+
|
|
294
|
+
async def query(self, query: api.Query, user_shortname: str | None = None) \
        -> Tuple[int, list[core.Record]]:
    """Dispatch a query to its serve_query_* handler and return (total, records).

    Unknown/unsupported query types fall through the match and yield (0, []).
    Note the handlers have differing signatures: some take the adapter
    (self), others only the query — the dispatch below reflects that.
    When the query carries joins, they are resolved client-side afterwards;
    a join failure is logged and ignored so the base result still returns.
    """
    records: list[core.Record] = []
    total: int = 0

    match query.type:
        case api.QueryType.spaces:
            total, records = await serve_query_space(self, query, user_shortname)

        case api.QueryType.search:
            total, records = await serve_query_search(self, query, user_shortname)

        case api.QueryType.subpath:
            total, records = await serve_query_subpath(self, query, user_shortname)

        case api.QueryType.counters:
            total, records = await serve_query_counters(query, user_shortname)

        case api.QueryType.tags:
            total, records = await serve_query_tags(self, query, user_shortname)

        case api.QueryType.random:
            total, records = await serve_query_random(self, query, user_shortname)

        case api.QueryType.history:
            total, records = await serve_query_history(query, user_shortname)

        case api.QueryType.events:
            total, records = await serve_query_events(query, user_shortname)

        case api.QueryType.aggregation:
            total, records = await serve_query_aggregation(self, query, user_shortname)

    # Client-side joins: best-effort, never fails the main query.
    if getattr(query, 'join', None):
        try:
            records = await self._apply_client_joins(records, query.join, (user_shortname or "anonymous"))  # type: ignore
        except Exception as e:
            print("[!client_join(file)]", e)

    return total, records
|
|
334
|
+
|
|
335
|
+
async def _apply_client_joins(self, base_records: list[core.Record], joins: list, user_shortname: str) -> list[core.Record]:
    """Resolve join clauses client-side, attaching matches under
    ``record.attributes['join'][alias]`` for every base record.

    Each join item carries a ``join_on`` expression of the form
    ``left_path:right_path`` (either side may end in ``[]`` to hint an array),
    an ``alias``, and a sub-``query`` that produces the right-hand records.
    Matching is by stringified value equality via a hash index.
    """
    def parse_join_on(expr: str) -> tuple[str, bool, str, bool]:
        # Split "left:right" once; "[]" suffixes mark array-valued sides.
        parts = [p.strip() for p in expr.split(':', 1)]
        if len(parts) != 2:
            raise ValueError(f"Invalid join_on expression: {expr}")
        left, right = parts[0], parts[1]
        _l_arr = left.endswith('[]')
        _r_arr = right.endswith('[]')
        if _l_arr:
            left = left[:-2]
        if _r_arr:
            right = right[:-2]
        return left, _l_arr, right, _r_arr

    def get_values_from_record(rec: core.Record, path: str, array_hint: bool) -> list:
        """Extract the join-key value(s) at *path*; always returns a list."""
        if path in ("shortname", "resource_type", "subpath", "uuid"):
            val = getattr(rec, path, None)
        elif path == "space_name":
            val = rec.attributes.get("space_name") if rec.attributes else None
        else:
            container = rec.attributes or {}
            # lazy import to reuse same helper as SQL
            from data_adapters.helpers import get_nested_value as _get
            val = _get(container, path)

        if val is None:
            return []
        if isinstance(val, list):
            # Only scalar items participate in matching; nested structures dropped.
            out = []
            for item in val:
                if isinstance(item, (str, int, float, bool)) or item is None:
                    out.append(item)
            return out
        # NOTE(review): both branches below are identical — array_hint is
        # effectively unused for scalar values; kept as-is.
        if array_hint:
            return [val]
        return [val]

    # Ensure every base record has a mutable attributes['join'] bucket.
    for rec in base_records:
        if rec.attributes is None:
            rec.attributes = {}
        if rec.attributes.get('join') is None:
            rec.attributes['join'] = {}

    import models.api as api
    for join_item in joins:
        join_on = getattr(join_item, 'join_on', None)
        alias = getattr(join_item, 'alias', None)
        q = getattr(join_item, 'query', None)
        if not join_on or not alias or q is None:
            # Malformed join item: skip silently.
            continue

        sub_query = q if isinstance(q, api.Query) else api.Query.model_validate(q)
        # NOTE(review): duplicate import — api is already imported above.
        import models.api as api
        from utils.settings import settings
        # Remember the caller's own limit, then fetch the full right side so
        # the index covers all potential matches; trim per-record later.
        q_raw = q if isinstance(q, dict) else q.model_dump(exclude_defaults=True)
        user_limit = q_raw.get('limit') or q_raw.get('limit_')
        sub_query.limit = settings.max_query_limit

        _total, right_records = await self.query(sub_query, user_shortname)

        l_path, l_arr, r_path, r_arr = parse_join_on(join_on)

        # Index right-hand records by stringified join-key value.
        right_index: dict[str, list[core.Record]] = {}
        for rr in right_records:
            r_vals = get_values_from_record(rr, r_path, r_arr)
            for v in r_vals:
                if v is None:
                    continue
                right_index.setdefault(str(v), []).append(rr)

        for br in base_records:
            l_vals = get_values_from_record(br, l_path, l_arr)
            matched: list[core.Record] = []
            for v in l_vals:
                if v is None:
                    continue
                matched.extend(right_index.get(str(v), []))

            # De-duplicate while preserving first-match order.
            seen = set()
            unique: list[core.Record] = []
            for m in matched:
                uid = f"{m.subpath}:{m.shortname}:{m.resource_type}"
                if uid in seen:
                    continue
                seen.add(uid)
                unique.append(m)

            if user_limit:
                unique = unique[:user_limit]

            br.attributes['join'][alias] = unique

    return base_records
|
|
428
|
+
|
|
429
|
+
async def load(
    self,
    space_name: str,
    subpath: str,
    shortname: str,
    class_type: Type[MetaChild],
    user_shortname: str | None = None,
    schema_shortname: str | None = None,
) -> MetaChild:
    """Load a Meta Json according to the requested Class type.

    Raises a 404 api.Exception when the meta file is missing (and, as a side
    effect, prunes the entry's now-empty directory), or a plain Exception
    when the file exists but fails JSON/model validation.

    NOTE(review): user_shortname is accepted but not used in this code path.
    """
    # A folder queried by its own name is addressed by its directory, not a file.
    if subpath == shortname and class_type is core.Folder:
        shortname = ""
    path, filename = self.metapath(
        space_name, subpath, shortname, class_type, schema_shortname
    )
    if not (path / filename).is_file():
        # Remove the folder — opportunistic cleanup of an empty metadata dir.
        if path.is_dir() and len(os.listdir(path)) == 0:
            shutil.rmtree(path)

        raise api.Exception(
            status_code=status.HTTP_404_NOT_FOUND,
            error=api.Error(
                type="db",
                code=InternalErrorCode.OBJECT_NOT_FOUND,
                message=f"Request object is not available @{space_name}/{subpath}/{shortname} {class_type=} {schema_shortname=}",
            ),
        )

    path /= filename
    content = ""
    try:
        async with aiofiles.open(path, "r") as file:
            content = await file.read()
        return class_type.model_validate_json(content)
    except Exception as e:
        # Re-raise with the offending path and raw content for diagnosis.
        raise Exception(f"Error Invalid Entry At: {path}. Error {e} {content=}")
|
|
466
|
+
|
|
467
|
+
async def load_resource_payload(
|
|
468
|
+
self,
|
|
469
|
+
space_name: str,
|
|
470
|
+
subpath: str,
|
|
471
|
+
filename: str,
|
|
472
|
+
class_type: Type[MetaChild],
|
|
473
|
+
schema_shortname: str | None = None,
|
|
474
|
+
):
|
|
475
|
+
"""Load a Meta class payload file"""
|
|
476
|
+
|
|
477
|
+
path = self.payload_path(space_name, subpath, class_type, schema_shortname)
|
|
478
|
+
path /= filename
|
|
479
|
+
|
|
480
|
+
if not path.is_file():
|
|
481
|
+
return None
|
|
482
|
+
try:
|
|
483
|
+
if class_type == core.Log:
|
|
484
|
+
return {"log_entry_items": read_jsonl_file(path)}
|
|
485
|
+
|
|
486
|
+
bytes = path.read_bytes()
|
|
487
|
+
return json.loads(bytes)
|
|
488
|
+
except Exception as _:
|
|
489
|
+
raise api.Exception(
|
|
490
|
+
status_code=status.HTTP_404_NOT_FOUND,
|
|
491
|
+
error=api.Error(type="db", code=12, message=f"Request object is not available {path}"),
|
|
492
|
+
)
|
|
493
|
+
|
|
494
|
+
async def save(self, space_name: str, subpath: str, meta: core.Meta) -> Any:
    """Save Meta Json to respective file.

    Creates the metadata directory on demand, writes the serialized model and
    fsyncs it to disk, then returns the JSON string that was written.
    Any failure is surfaced as a 400 API_Exception (OBJECT_NOT_SAVED).
    """
    try:
        path, filename = self.metapath(
            space_name,
            subpath,
            meta.shortname,
            meta.__class__,
            meta.payload.schema_shortname if meta.payload else None,
        )

        if not path.is_dir():
            os.makedirs(path)

        meta_json = meta.model_dump_json(exclude_none=True, warnings="error")
        with open(path / filename, "w") as file:
            file.write(meta_json)
            # flush + fsync: make the write durable before reporting success.
            file.flush()
            os.fsync(file)
        return meta_json
    except Exception as e:
        raise API_Exception(
            status_code=status.HTTP_400_BAD_REQUEST,
            error=API_Error(
                type="create",
                code=InternalErrorCode.OBJECT_NOT_SAVED,
                message=e.__str__(),
            ),
        )
|
|
523
|
+
|
|
524
|
+
async def create(self, space_name: str, subpath: str, meta: core.Meta):
    """Persist a brand-new meta file; 400 when the shortname already exists.

    The write is flushed and fsynced so the entry is durable on return.
    """
    path, filename = self.metapath(
        space_name, subpath, meta.shortname, meta.__class__
    )

    target = path / filename
    if target.is_file():
        raise api.Exception(
            status_code=status.HTTP_400_BAD_REQUEST,
            error=api.Error(
                type="create", code=InternalErrorCode.SHORTNAME_ALREADY_EXIST, message="already exists"),
        )

    if not path.is_dir():
        os.makedirs(path)

    with open(target, "w") as out:
        out.write(meta.model_dump_json(exclude_none=True, warnings="error"))
        out.flush()
        os.fsync(out)
|
|
543
|
+
|
|
544
|
+
async def save_payload(self, space_name: str, subpath: str, meta: core.Meta, attachment):
    """Write an uploaded attachment's raw bytes beside its (required) meta file.

    The payload filename is the entry shortname plus the upload's extension.
    Raises 400 (MISSING_METADATA) when the meta file has not been created yet.
    """
    meta_dir, meta_file = self.metapath(
        space_name, subpath, meta.shortname, meta.__class__
    )
    payload_dir = self.payload_path(
        space_name, subpath, meta.__class__)
    payload_name = meta.shortname + Path(attachment.filename).suffix

    if not (meta_dir / meta_file).is_file():
        raise api.Exception(
            status_code=status.HTTP_400_BAD_REQUEST,
            error=api.Error(
                type="create", code=InternalErrorCode.MISSING_METADATA, message="metadata is missing"),
        )

    content = await attachment.read()
    with open(payload_dir / payload_name, "wb") as out:
        out.write(content)
        out.flush()
        os.fsync(out)
|
|
564
|
+
|
|
565
|
+
async def save_payload_from_json(
    self,
    space_name: str,
    subpath: str,
    meta: core.Meta,
    payload_data: dict[str, Any],
):
    """Serialize *payload_data* to the entry's payload file.

    Log resources are stored as JSON-lines (``.jsonl``) and appended to when
    the file already exists; everything else is a single ``.json`` document
    that is overwritten. Requires the meta file to exist (400 otherwise).
    All writes are flushed and fsynced for durability.
    """
    path, filename = self.metapath(
        space_name,
        subpath,
        meta.shortname,
        meta.__class__,
        meta.payload.schema_shortname if meta.payload else None,
    )
    payload_file_path = self.payload_path(
        space_name,
        subpath,
        meta.__class__,
        meta.payload.schema_shortname if meta.payload else None,
    )

    # Logs accumulate as JSON-lines; other resources are one JSON document.
    payload_filename = f"{meta.shortname}.json" if not issubclass(meta.__class__,
                                                                  core.Log) else f"{meta.shortname}.jsonl"

    if not (path / filename).is_file():
        raise api.Exception(
            status_code=status.HTTP_400_BAD_REQUEST,
            error=api.Error(
                type="create", code=InternalErrorCode.MISSING_METADATA, message="metadata is missing"),
        )

    payload_json = json.dumps(payload_data)
    if issubclass(meta.__class__, core.Log) and (payload_file_path / payload_filename).is_file():
        # Append a new line to the existing log stream.
        with open(payload_file_path / payload_filename, "a") as file:
            file.write(f"\n{payload_json}")
            file.flush()
            os.fsync(file)
    else:
        # First write (or non-log resource): truncate and write the document.
        with open(payload_file_path / payload_filename, "w") as file:
            file.write(payload_json)
            file.flush()
            os.fsync(file)
|
|
607
|
+
|
|
608
|
+
async def update(
    self,
    space_name: str,
    subpath: str,
    meta: core.Meta,
    old_version_flattend: dict,
    new_version_flattend: dict,
    updated_attributes_flattend: list,
    user_shortname: str,
    schema_shortname: str | None = None,
    retrieve_lock_status: bool | None = False,
) -> dict:
    """Update the entry, store the difference and return it.

    Raises 404 when the entry's meta file is absent, and 403 when
    *retrieve_lock_status* is set and another user holds the lock. When the
    calling user holds the lock it is released and an unlock event is
    recorded. Log resources are written but produce no history diff.
    """
    path, filename = self.metapath(
        space_name,
        subpath,
        meta.shortname,
        meta.__class__,
        schema_shortname,
    )
    if not (path / filename).is_file():
        raise api.Exception(
            status_code=status.HTTP_404_NOT_FOUND,
            error=api.Error(type="update", code=InternalErrorCode.OBJECT_NOT_FOUND,
                            message="Request object is not available"),
        )
    if retrieve_lock_status:
        async with RedisServices() as redis_services:
            if await redis_services.is_entry_locked(
                space_name, subpath, meta.shortname, user_shortname
            ):
                # Locked by someone else: refuse the update.
                raise api.Exception(
                    status_code=status.HTTP_403_FORBIDDEN,
                    error=api.Error(
                        type="update", code=InternalErrorCode.LOCKED_ENTRY, message="This entry is locked"),
                )
            elif await redis_services.get_lock_doc(
                space_name, subpath, meta.shortname
            ):
                # if the current can release the lock that means he is the right user
                await redis_services.delete_lock_doc(
                    space_name, subpath, meta.shortname
                )
                # Record the implicit unlock in the entry's history stream.
                await self.store_entry_diff(
                    space_name,
                    "/" + subpath,
                    meta.shortname,
                    user_shortname,
                    {},
                    {"lock_type": LockAction.unlock},
                    ["lock_type"],
                    core.Content,
                )

    meta.updated_at = datetime.now()
    meta_json = meta.model_dump_json(exclude_none=True, warnings="error")
    with open(path / filename, "w") as file:
        file.write(meta_json)
        # flush + fsync so the update is durable before history is written.
        file.flush()
        os.fsync(file)

    # Log entries carry no history diff.
    if issubclass(meta.__class__, core.Log):
        return {}

    history_diff = await self.store_entry_diff(
        space_name,
        subpath,
        meta.shortname,
        user_shortname,
        old_version_flattend,
        new_version_flattend,
        updated_attributes_flattend,
        meta.__class__,
    )

    return history_diff
|
|
685
|
+
|
|
686
|
+
async def update_payload(
    self,
    space_name: str,
    subpath: str,
    meta: core.Meta,
    payload_data: dict[str, Any],
    owner_shortname: str,
):
    """Thin wrapper over save_payload_from_json.

    ``owner_shortname`` is part of the adapter interface but unused here.
    """
    await self.save_payload_from_json(space_name, subpath, meta, payload_data)
|
|
700
|
+
|
|
701
|
+
async def store_entry_diff(
    self,
    space_name: str,
    subpath: str,
    shortname: str,
    owner_shortname: str,
    old_version_flattend: dict,
    new_version_flattend: dict,
    updated_attributes_flattend: list,
    resource_type,
) -> dict:
    """Compute the old/new diff of an update and append it to history.jsonl.

    Only keys listed in *updated_attributes_flattend* are compared
    (``updated_at`` is always skipped); a missing side is recorded as the
    string "null". Returns the diff dict, or {} when nothing changed.
    """
    diff_keys = list(old_version_flattend.keys())
    diff_keys.extend(list(new_version_flattend.keys()))
    history_diff = {}
    for key in set(diff_keys):
        if key in ["updated_at"]:
            continue
        if key in updated_attributes_flattend:
            # "null" (a string) marks a side where the key did not exist.
            old = (
                copy(old_version_flattend[key])
                if key in old_version_flattend
                else "null"
            )
            new = (
                copy(new_version_flattend[key])
                if key in new_version_flattend
                else "null"
            )

            if old != new:
                # For lists, store only the elements that actually changed.
                if isinstance(old, list) and isinstance(new, list):
                    old, new = arr_remove_common(old, new)
                history_diff[key] = {
                    "old": old,
                    "new": new,
                }
    if not history_diff:
        return {}

    history_obj = core.History(
        shortname="history",
        owner_shortname=owner_shortname,
        timestamp=datetime.now(),
        request_headers=get_request_data().get('request_headers', {}),
        diff=history_diff,
    )
    history_path = settings.spaces_folder / space_name

    # Choose where history.jsonl lives, mirroring the metapath layout.
    if subpath == "/" and resource_type == core.Space:
        history_path = Path(f"{history_path}/.dm")
    else:
        if issubclass(resource_type, core.Attachment):
            history_path = Path(f"{history_path}/.dm/{subpath}")
        else:
            if subpath == "/":
                history_path = Path(f"{history_path}/.dm/{shortname}")
            else:
                history_path = Path(
                    f"{history_path}/{subpath}/.dm/{shortname}")

    if not os.path.exists(history_path):
        os.makedirs(history_path)

    # Append-only JSON-lines stream; one record per update.
    async with aiofiles.open(
        f"{history_path}/history.jsonl",
        "a",
    ) as events_file:
        await events_file.write(f"{history_obj.model_dump_json(exclude_none=True, warnings='error')}\n")

    return history_diff
|
|
771
|
+
|
|
772
|
+
async def move(
    self,
    src_space_name: str,
    src_subpath: str,
    src_shortname: str,
    dest_space_name: str,
    dest_subpath: str,
    dest_shortname: str,
    meta: core.Meta,
):
    """Relocate an entry (meta dir, payload file, and meta content) on disk.

    Falsy dest_subpath/dest_shortname fall back to the source values.
    Refuses to move onto a non-empty destination. When the shortname
    changes, ``meta`` is mutated and re-serialized at the destination.

    NOTE(review): uses os.rename, so source and destination are assumed to
    be on the same filesystem — confirm.
    """
    src_path, src_filename = self.metapath(
        src_space_name,
        src_subpath,
        src_shortname,
        meta.__class__,
    )
    dest_path, dest_filename = self.metapath(
        dest_space_name,
        dest_subpath or src_subpath,
        dest_shortname or src_shortname,
        meta.__class__,
    )

    meta_updated = False
    dest_path_without_dm = dest_path
    if dest_shortname:
        meta.shortname = dest_shortname
        meta_updated = True

    # metapath may return the inner .dm dir; rename must act on its parent.
    if src_path.parts[-1] == ".dm":
        src_path = Path("/".join(src_path.parts[:-1]))

    if dest_path.parts[-1] == ".dm":
        dest_path_without_dm = Path("/".join(dest_path.parts[:-1]))

    if dest_path_without_dm.is_dir() and len(os.listdir(dest_path_without_dm)):
        raise api.Exception(
            status_code=status.HTTP_404_NOT_FOUND,
            error=api.Error(
                type="move",
                code=InternalErrorCode.NOT_ALLOWED_LOCATION,
                message="The destination folder is not empty",
            ),
        )

    # Create dest dir if there's a change in the subpath AND the shortname
    # and the subpath shortname folder doesn't exist,
    if (
        src_shortname != dest_shortname
        and src_subpath != dest_subpath
        and not os.path.isdir(dest_path_without_dm)
    ):
        os.makedirs(dest_path_without_dm)

    os.rename(src=src_path, dst=dest_path_without_dm)

    # Move payload file with the meta file
    if (
        meta.payload
        and meta.payload.content_type != ContentType.text
        and isinstance(meta.payload.body, str)
    ):
        src_payload_file_path = (
            self.payload_path(src_space_name, src_subpath, meta.__class__)
            / meta.payload.body
        )
        # Rebuild the payload filename around the (possibly new) shortname,
        # keeping the original extension.
        file_extension = Path(meta.payload.body).suffix
        if file_extension.startswith('.'):
            file_extension = file_extension[1:]
        meta.payload.body = meta.shortname + "." + file_extension
        dist_payload_file_path = (
            self.payload_path(
                dest_space_name, dest_subpath or src_subpath, meta.__class__
            )
            / meta.payload.body
        )
        if src_payload_file_path.is_file():
            os.rename(src=src_payload_file_path, dst=dist_payload_file_path)

    # Re-serialize meta when the shortname changed (body now references it).
    if meta_updated:
        meta_json = meta.model_dump_json(exclude_none=True, warnings="error")
        with open(dest_path / dest_filename, "w") as opened_file:
            opened_file.write(meta_json)
            opened_file.flush()
            os.fsync(opened_file)

    # Delete Src path if empty
    if src_path.parent.is_dir():
        self.delete_empty(src_path)
|
|
862
|
+
|
|
863
|
+
def delete_empty(self, path: Path):
    """Remove *path* when it is an empty directory, then keep pruning empty
    ancestors upward until a non-empty one is reached."""
    if path.is_dir() and not os.listdir(path):
        # removedirs also prunes now-empty ancestors in a single call.
        os.removedirs(path)

    parent = path.parent
    if parent.is_dir() and not os.listdir(parent):
        self.delete_empty(parent)
|
|
869
|
+
|
|
870
|
+
async def clone(
    self,
    src_space: str,
    dest_space: str,
    src_subpath: str,
    src_shortname: str,
    dest_subpath: str,
    dest_shortname: str,
    class_type: Type[MetaChild],
):
    """Copy an entry's meta file — and its payload file, when one exists —
    to a new space/subpath/shortname.

    Unlike ``move``, the source is left untouched and the payload keeps its
    original filename (``meta_obj.payload.body`` is not rewritten).
    """
    meta_obj = await self.load(
        space_name=src_space,
        subpath=src_subpath,
        shortname=src_shortname,
        class_type=class_type,
    )

    src_path, src_filename = self.metapath(
        src_space, src_subpath, src_shortname, class_type
    )
    dest_path, dest_filename = self.metapath(
        dest_space,
        dest_subpath,
        dest_shortname,
        class_type,
    )

    # Create dest dir if not exist
    if not os.path.isdir(dest_path):
        os.makedirs(dest_path)

    copy_file(src=src_path / src_filename, dst=dest_path / dest_filename)

    # (Removed a stray, side-effect-free `self.payload_path(...)` call whose
    # result was discarded.)

    # Copy the payload file along with the meta file when one exists on disk.
    if (
        meta_obj.payload
        and meta_obj.payload.content_type != ContentType.text
        and isinstance(meta_obj.payload.body, str)
    ):
        src_payload_file_path = (
            self.payload_path(src_space, src_subpath, class_type)
            / meta_obj.payload.body
        )
        dist_payload_file_path = (
            self.payload_path(
                dest_space, dest_subpath, class_type
            )
            / meta_obj.payload.body
        )
        copy_file(src=src_payload_file_path, dst=dist_payload_file_path)
|
|
923
|
+
|
|
924
|
+
async def is_entry_exist(
    self,
    space_name: str,
    subpath: str,
    shortname: str,
    resource_type: ResourceType,
    schema_shortname: str | None = None,
) -> bool:
    """Check if an entry with the given name already exist or not in the given path

    Args:
        space_name (str): The target space name
        subpath (str): The target subpath
        shortname (str): the target shortname
        resource_type (ResourceType): The target resource type of the entry
        schema_shortname (str | None, optional): schema shortname of the entry. Defaults to None.

    Returns:
        bool: True if it's already exist, False otherwise
    """
    # Turn an absolute-looking subpath ("/x") into a relative one ("./x").
    if subpath[0] == "/":
        subpath = f".{subpath}"

    # A plain JSON payload file with this shortname also counts as existing.
    payload_file = settings.spaces_folder / space_name / \
        subpath / f"{shortname}.json"
    if payload_file.is_file():
        return True

    # Shortnames must be unique across resource types: probe the meta path
    # of every resource class that models.core defines.
    for r_type in ResourceType:
        # Spaces compared with each others only
        if r_type == ResourceType.space and r_type != resource_type:
            continue
        resource_cls = getattr(
            sys.modules["models.core"], camel_case(r_type.value), None
        )
        if not resource_cls:
            continue
        meta_path, meta_file = self.metapath(
            space_name, subpath, shortname, resource_cls, schema_shortname)
        if (meta_path / meta_file).is_file():
            return True

    return False
async def delete(
    self,
    space_name: str,
    subpath: str,
    meta: core.Meta,
    user_shortname: str,
    schema_shortname: str | None = None,
    retrieve_lock_status: bool | None = False,
):
    """Delete an entry's meta file, payload file and (for folders) history dir.

    Raises:
        api.Exception: 404 when the entry is not on disk; 403 when
            `retrieve_lock_status` is set and the entry is locked by
            another user.
    """
    path, filename = self.metapath(
        space_name,
        subpath,
        meta.shortname,
        meta.__class__,

        schema_shortname,
    )
    if not path.is_dir() or not (path / filename).is_file():
        raise api.Exception(
            status_code=status.HTTP_404_NOT_FOUND,
            error=api.Error(
                type="delete", code=InternalErrorCode.OBJECT_NOT_FOUND, message="Request object is not available"),
        )
    if retrieve_lock_status:
        async with RedisServices() as redis_services:
            if await redis_services.is_entry_locked(
                space_name, subpath, meta.shortname, user_shortname
            ):
                raise api.Exception(
                    status_code=status.HTTP_403_FORBIDDEN,
                    error=api.Error(
                        type="delete", code=InternalErrorCode.LOCKED_ENTRY, message="This entry is locked"),
                )
            else:
                # if the current can release the lock that means he is the right user
                await redis_services.delete_lock_doc(
                    space_name, subpath, meta.shortname
                )

    pathname = path / filename
    if pathname.is_file():
        os.remove(pathname)

    # Delete payload file
    if meta.payload and meta.payload.content_type not in ContentType.inline_types():
        payload_file_path = self.payload_path(
            space_name, subpath, meta.__class__
        ) / str(meta.payload.body)
        if payload_file_path.exists() and payload_file_path.is_file():
            os.remove(payload_file_path)

    # NOTE(review): no "/" is inserted between space_name and subpath —
    # this relies on subpath starting with "/"; confirm against callers.
    history_path = f"{settings.spaces_folder}/{space_name}" + \
        f"{subpath}/.dm/{meta.shortname}"

    # Remove the .dm directory: always for non-attachments, and for
    # attachments only once it is empty.
    if (
        path.is_dir()
        and (
            not isinstance(meta, core.Attachment)
            or len(os.listdir(path)) == 0
        )
    ):
        shutil.rmtree(path)
        # in case of folder the path = {folder_name}/.dm
        if isinstance(meta, core.Folder) and path.parent.is_dir():
            shutil.rmtree(path.parent)
        if isinstance(meta, core.Folder) and Path(history_path).is_dir():
            shutil.rmtree(history_path)
async def lock_handler(self, space_name: str, subpath: str, shortname: str, user_shortname: str,
                       action: LockAction) -> dict | None:
    """Acquire, fetch or release an entry's Redis lock document.

    Returns a dict for lock/fetch actions, and None for unlock.
    """
    match action:
        case LockAction.lock:
            async with RedisServices() as redis_services:
                lock_type = await redis_services.save_lock_doc(
                    space_name,
                    subpath,
                    shortname,
                    user_shortname,
                    settings.lock_period,
                )
                # NOTE(review): the dict key is the *value* of lock_type,
                # not the literal string "lock_type" — looks unintentional;
                # confirm what API consumers expect before changing.
                return {lock_type: lock_type}
        case LockAction.fetch:
            async with RedisServices() as redis_services:
                lock_payload = await redis_services.get_lock_doc(
                    space_name, subpath, shortname
                )
                return dict(lock_payload)
        case LockAction.unlock:
            async with RedisServices() as redis_services:
                await redis_services.delete_lock_doc(
                    space_name, subpath, shortname
                )
                return None
async def fetch_space(self, space_name: str) -> core.Space | None:
    """Return the Space object for `space_name`, or None when unknown."""
    known_spaces = await self.get_spaces()
    if space_name in known_spaces:
        return core.Space.model_validate_json(known_spaces[space_name])
    return None
async def get_entry_attachments(
    self,
    subpath: str,
    attachments_path: Path,
    filter_types: list | None = None,
    include_fields: list | None = None,
    filter_shortnames: list | None = None,
    retrieve_json_payload: bool = False,
) -> dict:
    """Collect an entry's attachments grouped by ResourceType.

    Scans `attachments_path`: each sub-directory is an attachment-type folder
    whose files are parsed as attachment meta records. JSON payload bodies are
    inlined when `retrieve_json_payload` is set. Best-effort: returns {} on
    any unexpected error.
    """
    if not attachments_path.is_dir():
        return {}
    try:
        attachments_iterator = os.scandir(attachments_path)
        attachments_dict: dict[ResourceType, list] = {}
        for attachment_entry in attachments_iterator:
            # TODO: Filter types on the parent attachment type folder layer
            if not attachment_entry.is_dir():
                continue

            attachments_files = os.scandir(attachment_entry)
            for attachments_file in attachments_files:
                match = regex.ATTACHMENT_PATTERN.search(str(attachments_file.path))
                if not match or not attachments_file.is_file():
                    continue

                attach_shortname = match.group(2)
                attach_resource_name = match.group(1).lower()
                if filter_shortnames and attach_shortname not in filter_shortnames:
                    continue

                if filter_types and ResourceType(attach_resource_name) not in filter_types:
                    continue

                resource_class = getattr(
                    sys.modules["models.core"], camel_case(attach_resource_name)
                )
                resource_obj = None
                async with aiofiles.open(attachments_file, "r") as meta_file:
                    try:
                        resource_obj = resource_class.model_validate_json(await meta_file.read())
                    except Exception as e:
                        raise Exception(
                            f"Bad attachment ... {attachments_file=}"
                        ) from e

                resource_record_obj = resource_obj.to_record(
                    subpath, attach_shortname, include_fields
                )
                # Inline the JSON payload body when requested and present.
                if is_file_check(retrieve_json_payload, resource_obj, resource_record_obj, attachment_entry):
                    async with aiofiles.open(
                        f"{attachment_entry.path}/{resource_obj.payload.body}", "r"
                    ) as payload_file_content:
                        resource_record_obj.attributes["payload"].body = json.loads(
                            await payload_file_content.read()
                        )

                # BUGFIX: the membership test previously used the raw string
                # while entries are stored under ResourceType keys; key the
                # dict consistently via setdefault.
                attachments_dict.setdefault(
                    ResourceType(attach_resource_name), []
                ).append(resource_record_obj)
            attachments_files.close()
        attachments_iterator.close()

        # SORT ALTERATION ATTACHMENTS BY ALTERATION.CREATED_AT
        sort_alteration(attachments_dict, attachments_path)

        return attachments_dict
    except Exception as e:
        print(e)
        return {}
async def get_spaces(self) -> dict:
    """Return the cached spaces mapping from the Redis "spaces" doc, or {}."""
    async with RedisServices() as redis_services:
        cached = await redis_services.get_doc_by_id("spaces")
        return cached if isinstance(cached, dict) else {}
async def validate_uniqueness(
    self, space_name: str, record: core.Record, action: str = api.RequestType.create, user_shortname=None
) -> bool:
    """
    Get list of unique fields from entry's folder meta data
    ensure that each sub-list in the list is unique across all entries

    Raises API_Exception (400, DATA_SHOULD_BE_UNIQUE) when another entry
    already holds the same composite values; returns True otherwise.
    """
    folder_meta_path = (
        settings.spaces_folder
        / space_name
        / f"{record.subpath[1:] if record.subpath[0] == '/' else record.subpath}.json"
    )

    # No folder meta => no uniqueness constraints to enforce.
    if not folder_meta_path.is_file():
        return True

    async with aiofiles.open(folder_meta_path, "r") as file:
        content = await file.read()
    folder_meta = json.loads(content)

    if not isinstance(folder_meta.get("unique_fields", None), list):
        return True

    entry_dict_flattened: dict[Any, Any] = flatten_dict and flatten_list_of_dicts_in_dict(
        flatten_dict(record.attributes)
    )
    # Escape table for RediSearch special characters in TEXT/TAG queries.
    redis_escape_chars = str.maketrans(
        {".": r"\.", "@": r"\@", ":": r"\:", "/": r"\/", "-": r"\-", " ": r"\ "}
    )
    # Variant used for "_replace_specials"/email fields: dots kept, "@" -> ".".
    redis_replace_chars: dict[int, str] = str.maketrans(
        {".": r".", "@": r".", ":": r"\:", "/": r"\/", "-": r"\-", " ": r"\ "}
    )
    # Go over each composite unique array of fields and make sure there's no entry with those values
    for composite_unique_keys in folder_meta["unique_fields"]:
        redis_search_str = ""
        for unique_key in composite_unique_keys:
            # Remember the declared key; strip indexing-suffix markers to get
            # the actual attribute name to read from the flattened entry.
            base_unique_key = unique_key
            if unique_key.endswith("_unescaped"):
                unique_key = unique_key.replace("_unescaped", "")
            if unique_key.endswith("_replace_specials"):
                unique_key = unique_key.replace("_replace_specials", "")
            if not entry_dict_flattened.get(unique_key, None):
                continue

            # RediSearch field name: payload.body.* prefix dropped, dots -> underscores.
            redis_column = unique_key.split("payload.body.")[-1].replace(".", "_")

            # construct redis search string
            if (
                base_unique_key.endswith("_unescaped")
            ):
                # TAG field: exact-match query with {...} syntax.
                redis_search_str += (
                    " @"
                    + base_unique_key
                    + ":{"
                    + entry_dict_flattened[unique_key]
                    .translate(redis_escape_chars)
                    .replace("\\\\", "\\")
                    + "}"
                )
            elif (
                base_unique_key.endswith("_replace_specials") or unique_key.endswith('email')
            ):
                redis_search_str += (
                    " @"
                    + redis_column
                    + ":"
                    + entry_dict_flattened[unique_key]
                    .translate(redis_replace_chars)
                    .replace("\\\\", "\\")
                )

            elif (
                isinstance(entry_dict_flattened[unique_key], list)
            ):
                # List values: TAG query matching any element.
                redis_search_str += (
                    " @"
                    + redis_column
                    + ":{"
                    + "|".join([
                        item.translate(redis_escape_chars).replace("\\\\", "\\") for item in
                        entry_dict_flattened[unique_key]
                    ])
                    + "}"
                )
            elif isinstance(entry_dict_flattened[unique_key], (str, bool)):  # booleans are indexed as TextField
                redis_search_str += (
                    " @"
                    + redis_column
                    + ":"
                    + entry_dict_flattened[unique_key]
                    .translate(redis_escape_chars)
                    .replace("\\\\", "\\")
                )

            elif isinstance(entry_dict_flattened[unique_key], int):
                # NUMERIC field: exact match via a degenerate range [v v].
                redis_search_str += (
                    " @"
                    + redis_column
                    + f":[{entry_dict_flattened[unique_key]} {entry_dict_flattened[unique_key]}]"
                )
            else:
                continue

        if not redis_search_str:
            continue

        subpath = record.subpath
        if subpath[0] == "/":
            subpath = subpath[1:]

        redis_search_str += f" @subpath:{subpath}"

        if action == api.RequestType.update:
            # Exclude the entry being updated from the duplicate check.
            redis_search_str += f" (-@shortname:{record.shortname})"

        schema_name = record.attributes.get("payload", {}).get("schema_shortname", None)

        # Entries covered by a custom index are searched under "meta".
        for index in RedisServices.CUSTOM_INDICES:
            if space_name == index["space"] and index["subpath"] == subpath:
                schema_name = "meta"
                break

        if not schema_name:
            continue

        async with RedisServices() as redis_services:
            redis_search_res = await redis_services.search(
                space_name=space_name,
                search=redis_search_str,
                limit=1,
                offset=0,
                filters={},
                schema_name=schema_name,
            )

        if redis_search_res and redis_search_res["total"] > 0:
            raise API_Exception(
                status.HTTP_400_BAD_REQUEST,
                API_Error(
                    type="request",
                    code=InternalErrorCode.DATA_SHOULD_BE_UNIQUE,
                    message=f"Entry should have unique values on the following fields: {', '.join(composite_unique_keys)}",
                ),
            )
    return True
async def validate_payload_with_schema(
    self,
    payload_data: UploadFile | dict,
    space_name: str,
    schema_shortname: str,
):
    """Validate payload data against the space's JSON Schema.

    Raises:
        API_Exception: 400 when payload_data is neither dict nor UploadFile.
        jsonschema.ValidationError: when the data violates the schema
            (propagated from Draft7Validator).
    """
    if not isinstance(payload_data, (dict, UploadFile)):
        raise API_Exception(
            status.HTTP_400_BAD_REQUEST,
            API_Error(
                type="request",
                code=InternalErrorCode.INVALID_DATA,
                message="Invalid payload.body",
            ),
        )

    schema_path = get_schema_path(
        space_name=space_name,
        schema_shortname=f"{schema_shortname}.json",
    )

    schema = json.loads(FSPath(schema_path).read_text())

    # For uploads, parse the file then rewind so the caller can re-read it.
    if not isinstance(payload_data, dict):
        data = json.load(payload_data.file)
        payload_data.file.seek(0)
    else:
        data = payload_data

    Draft7Validator(schema).validate(data)  # type: ignore
async def get_failed_password_attempt_count(self, user_shortname: str) -> int:
    """Return the recorded failed-login-attempt count for the user (0 if none)."""
    async with RedisServices() as redis_services:
        raw_count = await redis_services.get(f"users:failed_login_attempts/{user_shortname}")
        return int(raw_count) if raw_count else 0
async def clear_failed_password_attempts(self, user_shortname: str):
    """Remove the user's failed-login-attempts counter from Redis."""
    counter_key = f"users:failed_login_attempts/{user_shortname}"
    async with RedisServices() as redis_services:
        return await redis_services.del_keys([counter_key])
async def set_failed_password_attempt_count(self, user_shortname: str, attempt_count: int):
    """Store the user's failed-login-attempts counter in Redis."""
    counter_key = f"users:failed_login_attempts/{user_shortname}"
    async with RedisServices() as redis_services:
        return await redis_services.set(counter_key, attempt_count)
async def get_invitation(self, invitation_token: str):
    """Return the invitation payload stored under `invitation_token`.

    Raises:
        api.Exception: 401 when the token is unknown or expired.
    """
    async with RedisServices() as redis_services:
        # FIXME invitation_token = await redis_services.getdel_key(
        token = await redis_services.get_key(
            f"users:login:invitation:{invitation_token}"
        )

        if not token:
            # BUGFIX: raise the project's api.Exception (as sibling methods
            # do) instead of a bare builtin Exception, so the API error
            # handler produces a proper 401 response instead of a 500.
            raise api.Exception(
                status_code=status.HTTP_401_UNAUTHORIZED,
                error=api.Error(
                    type="jwtauth", code=InternalErrorCode.INVALID_INVITATION, message="Invalid invitation"),
            )

        return token
async def delete_invitation(self, invitation_token: str) -> bool:
    """Best-effort removal of an invitation token; True on success."""
    async with RedisServices() as redis_services:
        try:
            await redis_services.delete(f"users:login:invitation:{invitation_token}")
        except Exception as e:
            logger.error(f"Error deleting invitation token {e}")
            return False
        return True
async def get_url_shortner(self, token_uuid: str) -> str | None:
    """Look up the destination URL stored under a short token."""
    redis_key = f"short/{token_uuid}"
    async with RedisServices() as redis_services:
        return await redis_services.get_key(redis_key)
async def get_latest_history(
    self,
    space_name: str,
    subpath: str,
    shortname: str,
) -> Any | None:
    """Return the most recent history record of an entry, or None.

    Probes the two possible on-disk locations of history.jsonl (the layout
    differs for space-root entries) and reads its last line via `tail`.
    """
    history_path = settings.spaces_folder / space_name

    if subpath == "/" or subpath == "":
        # Root-level entry: history may sit directly under .dm or under a
        # per-shortname folder.
        path1 = history_path / ".dm" / "history.jsonl"
        path2 = history_path / ".dm" / shortname / "history.jsonl"

        if path2.is_file():
            path = path2
        elif path1.is_file():
            path = path1
        else:
            return None
    else:
        path1 = history_path / subpath / ".dm" / shortname / "history.jsonl"
        path2 = history_path / ".dm" / subpath / "history.jsonl"

        if path1.is_file():
            path = path1
        elif path2.is_file():
            path = path2
        else:
            return None

    try:
        # `tail -n 1` avoids reading a potentially large jsonl file into memory.
        r1 = subprocess.Popen(
            ["tail", "-n", "1", str(path)], stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        )
        r2, _ = r1.communicate()
        if r2:
            return json.loads(r2.decode().strip())
    except Exception:
        # Best-effort: missing tail binary or malformed JSON yields None.
        pass
    return None
async def get_entry_by_criteria(self, criteria: dict, table: Any = None) -> core.Record | None:
    """Search the management space for one record matching every criterion.

    `@` in values is replaced by the `?` wildcard for the Redis search.
    Returns the first (and only, limit=1) hit parsed as a dict, or None.
    """
    query = "".join(
        f"@{field}:({value.replace('@', '?')}) "
        for field, value in criteria.items()
    )
    async with RedisServices() as redis_services:
        search_result = await redis_services.search(
            space_name=settings.management_space,
            search=query,
            filters={"subpath": [table]},
            limit=1,
            offset=0,
        )
        if not search_result["data"]:
            return None
        return json.loads(search_result["data"][0])
async def get_media_attachment(self, space_name: str, subpath: str, shortname: str) -> io.BytesIO | None:
    """Return a media attachment's bytes.

    Not implemented in this adapter; implicitly returns None.
    """
    pass
async def get_user_session(self, user_shortname: str, token: str) -> Tuple[int, str | None]:
    """Return (session_count, stored session value or None) for the user.

    The count is hard-coded to 1 in this adapter.
    """
    # NOTE(review): `token` is unused here; the stored value is the hashed
    # token written by set_user_session — presumably callers compare it
    # themselves. Confirm.
    async with RedisServices() as redis:
        return 1, await redis.get_key(
            f"user_session:{user_shortname}"
        )
async def remove_user_session(self, user_shortname: str) -> bool:
    """Drop the user's stored session key; True when something was deleted."""
    session_key = f"user_session:{user_shortname}"
    async with RedisServices() as redis:
        deleted = await redis.del_keys([session_key])
        return bool(deleted)
async def set_invitation(self, invitation_token: str, invitation_value):
    """Persist an invitation token -> value mapping in Redis."""
    invitation_key = f"users:login:invitation:{invitation_token}"
    async with RedisServices() as redis_services:
        await redis_services.set_key(invitation_key, invitation_value)
async def set_user_session(self, user_shortname: str, token: str) -> bool:
    """Store a hashed session token for the user with an inactivity TTL.

    When max_sessions_per_user == 1, any existing session key for the user
    is evicted first (single-session policy). Returns True when the key was
    written.
    """
    async with RedisServices() as redis:
        if settings.max_sessions_per_user == 1:
            if await redis.get_key(
                f"user_session:{user_shortname}"
            ):
                await redis.del_keys([f"user_session:{user_shortname}"])

        # The raw token is never stored — only its hash.
        return bool(await redis.set_key(
            key=f"user_session:{user_shortname}",
            value=hash_password(token),
            ex=settings.session_inactivity_ttl,
        ))
async def set_url_shortner(self, token_uuid: str, url: str):
    """Map a short token to its destination URL with expiry (overwrites any
    existing mapping, nx=False)."""
    short_key = f"short/{token_uuid}"
    async with RedisServices() as redis_services:
        await redis_services.set_key(
            short_key,
            url,
            ex=settings.url_shorter_expires,
            nx=False,
        )
async def delete_url_shortner(self, token_uuid: str) -> bool:
    """Remove a short-URL mapping; True when a key was actually deleted."""
    short_key = f"short/{token_uuid}"
    async with RedisServices() as redis_services:
        removed = await redis_services.del_keys([short_key])
        return bool(removed)
async def delete_url_shortner_by_token(self, invitation_token: str) -> bool:
    """Delete shortened URLs tied to an invitation token.

    Not implemented yet; always reports success.
    """
    #TODO: implement this method
    return True
async def get_schema(self, space_name: str, schema_shortname: str, owner_shortname: str) -> dict:
    """Load a schema JSON from the space's schema payload folder and resolve
    its internal references."""
    schema_file_path = (
        self.payload_path(space_name, "schema", core.Schema)
        / f"{schema_shortname}.json"
    )
    with open(schema_file_path) as schema_file:
        raw_schema = json.load(schema_file)
    return resolve_schema_references(raw_schema)
async def check_uniqueness(self, unique_fields, search_str, redis_escape_chars) -> dict:
    """Verify none of `unique_fields` values already exist in the management space.

    Returns {"unique": True}, or {"unique": False, "field": <key>} for the
    first field whose value is already taken.
    """
    async with RedisServices() as redis_man:
        for key, value in unique_fields.items():
            if not value:
                continue

            # Escape RediSearch special characters in the candidate value.
            value = value.translate(redis_escape_chars).replace("\\\\", "\\")
            if key == "email_unescaped":
                # TAG fields are queried with {...} syntax.
                value = f"{{{value}}}"

            redis_search_res = await redis_man.search(
                space_name=settings.management_space,
                search=search_str + f" @{key}:{value}",
                limit=0,
                offset=0,
                filters={},
            )

            if redis_search_res and redis_search_res["total"] > 0:
                return {"unique": False, "field": key}

        return {"unique": True}
async def get_role_permissions(self, role: core.Role) -> list[core.Permission]:
    """Resolve a role's permission shortnames into Permission objects.

    Searches the management space's "permissions" subpath for any of the
    role's permission shortnames; returns [] when nothing matches.
    """
    permissions_options = "|".join(role.permissions)
    async with RedisServices() as redis_services:
        permissions_search = await redis_services.search(
            space_name=settings.management_space,
            search=f"@shortname:{permissions_options}",
            filters={"subpath": ["permissions"]},
            limit=10000,
            offset=0,
        )
        if not permissions_search:
            return []

        role_permissions: list[core.Permission] = []

        for permission_doc in permissions_search["data"]:
            permission_doc = json.loads(permission_doc)
            # Shortname search may hit non-permission docs; keep permissions only.
            if permission_doc['resource_type'] == 'permission':
                permission = core.Permission.model_validate(permission_doc)
                role_permissions.append(permission)

        return role_permissions
async def get_user_roles(self, user_shortname: str) -> dict[str, core.Role]:
    """Return the user's effective roles keyed by role shortname.

    Combines the user's directly-assigned roles (plus the implicit
    "logged_in" role) found via Redis search with roles inherited through
    group membership.
    """
    user_meta: core.User = await self.load_user_meta(user_shortname)
    user_associated_roles = user_meta.roles
    user_associated_roles.append("logged_in")
    async with RedisServices() as redis_services:
        roles_search = await redis_services.search(
            space_name=settings.management_space,
            search="@shortname:(" + "|".join(user_associated_roles) + ")",
            filters={"subpath": ["roles"]},
            limit=10000,
            offset=0,
        )

        user_roles_from_groups = await self.get_user_roles_from_groups(user_meta)
        if not roles_search and not user_roles_from_groups:
            return {}

        user_roles: dict[str, core.Role] = {}

        all_user_roles_from_redis = []
        # BUGFIX: previously roles_search["data"] was subscripted even when
        # roles_search was falsy (but group roles existed), which raised
        # TypeError/KeyError instead of returning the group-derived roles.
        if roles_search:
            all_user_roles_from_redis.extend(roles_search["data"])

        all_user_roles_from_redis.extend(user_roles_from_groups)
        for role_json in all_user_roles_from_redis:
            role = core.Role.model_validate(json.loads(role_json))
            user_roles[role.shortname] = role

        return user_roles
async def load_user_meta(self, user_shortname: str) -> Any:
    """Load a user's meta object, preferring the Redis cache over storage.

    Cache-aside: on a miss the user is loaded via self.load and the Redis
    meta doc is repopulated.
    """
    async with RedisServices() as redis_services:
        user_meta_doc_id = redis_services.generate_doc_id(
            space_name=settings.management_space,
            schema_shortname="meta",
            subpath="users",
            shortname=user_shortname,
        )
        value: dict = await redis_services.get_doc_by_id(user_meta_doc_id)

        if not value:
            # Cache miss: load from storage and refresh the cached doc.
            user = await self.load(
                space_name=settings.management_space,
                shortname=user_shortname,
                subpath="users",
                class_type=core.User,
                user_shortname=user_shortname,
            )
            await redis_services.save_meta_doc(
                settings.management_space,
                "users",
                user,
            )
        else:
            user = core.User.model_validate(value)
        return user
async def generate_user_permissions(self, user_shortname: str) -> dict:
    """Compute and cache the user's effective permission map.

    Aggregates the permissions of every role the user holds (directly, via
    groups, and the global "world" permission) into a flat mapping keyed by
    "space:subpath:resource_type", then stores it in Redis under
    "users_permissions_{user_shortname}".

    Raises:
        api.Exception: 422 wrapping any error raised while aggregating.
    """
    try:
        user_permissions: dict = {}

        user_roles = await self.get_user_roles(user_shortname)
        for _, role in user_roles.items():
            role_permissions = await self.get_role_permissions(role)
            # The "world" permission applies to everyone.
            # NOTE(review): it is re-loaded and re-appended once per role;
            # harmless for the resulting union but could be hoisted out of
            # the loop.
            permission_world_record = await self.load_or_none(
                settings.management_space,
                'permissions',
                "world",
                core.Permission
            )
            if permission_world_record:
                role_permissions.append(permission_world_record)

            for permission in role_permissions:
                for space_name, permission_subpaths in permission.subpaths.items():
                    for permission_subpath in permission_subpaths:
                        # Expand magic placeholders (e.g. current user) in the subpath.
                        permission_subpath = trans_magic_words(permission_subpath, user_shortname)
                        for permission_resource_types in permission.resource_types:
                            actions = set(permission.actions)
                            conditions = set(permission.conditions)
                            # Merge with any permission already recorded under
                            # the same space:subpath:resource_type key.
                            if (
                                f"{space_name}:{permission_subpath}:{permission_resource_types}"
                                in user_permissions
                            ):
                                old_perm = user_permissions[
                                    f"{space_name}:{permission_subpath}:{permission_resource_types}"
                                ]

                                if isinstance(actions, list):
                                    actions = set(actions)
                                actions |= set(old_perm["allowed_actions"])

                                if isinstance(conditions, list):
                                    conditions = set(conditions)
                                conditions |= set(old_perm["conditions"])

                            user_permissions[
                                f"{space_name}:{permission_subpath}:{permission_resource_types}"
                            ] = {
                                "allowed_actions": list(actions),
                                "conditions": list(conditions),
                                "restricted_fields": permission.restricted_fields,
                                "allowed_fields_values": permission.allowed_fields_values
                            }
        async with RedisServices() as redis_services:
            await redis_services.save_doc(
                f"users_permissions_{user_shortname}", user_permissions
            )
        return user_permissions
    except Exception as e:
        logger.error(f"Error generating user permissions: {e}")
        raise api.Exception(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            error=api.Error(
                type="system",
                code=InternalErrorCode.UNPROCESSABLE_ENTITY,
                message=str(e),
            ),
        )
async def get_user_permissions(self, user_shortname: str) -> dict:
    """Return the user's cached permission map, generating it on a miss."""
    permissions_doc_id = f"users_permissions_{user_shortname}"
    async with RedisServices() as redis_services:
        cached_permissions: dict = await redis_services.get_doc_by_id(
            permissions_doc_id
        )
    if cached_permissions:
        return cached_permissions
    return await self.generate_user_permissions(user_shortname)
async def get_user_by_criteria(self, key: str, value: str) -> str | None:
    """Find a user shortname by one indexed field value, or None.

    `@` in the value is replaced by the `?` wildcard for the Redis query.
    """
    async with RedisServices() as redis_services:
        user_search = await redis_services.search(
            space_name=settings.management_space,
            search=f"@{key}:({value.replace('@', '?')})",
            filters={"subpath": ["users"]},
            limit=10000,
            offset=0,
        )
        if not user_search["data"]:
            return None

        first_hit = json.loads(user_search["data"][0])
        shortname = first_hit.get("shortname")
        if shortname and isinstance(shortname, str):
            return shortname
        return None
async def get_payload_from_event(self, event) -> dict:
    """Load and return the payload body referenced by an event, or {}."""
    mypayload = await self.load_resource_payload(
        event.space_name,
        event.subpath,
        event.payload.body,
        # NOTE(review): `sys_modules` is presumably an alias of sys.modules
        # imported at the top of the file (sibling methods use sys.modules
        # directly) — confirm the import exists.
        getattr(sys_modules["models.core"], camel_case(event.resource_type)),
    )
    return mypayload if mypayload else {}
async def get_user_roles_from_groups(self, user_meta: core.User) -> list:
    """Return the raw role docs granted to the user via group membership.

    Looks up each of the user's groups in the management space, then fetches
    the meta doc of every role each group grants. Returns [] when the user
    has no groups or none are found.
    """
    if not user_meta.groups:
        return []

    async with RedisServices() as redis_services:
        groups_search = await redis_services.search(
            space_name=settings.management_space,
            search="@shortname:(" + "|".join(user_meta.groups) + ")",
            filters={"subpath": ["groups"]},
            limit=10000,
            offset=0,
        )
        if not groups_search:
            return []

        roles = []
        for group in groups_search["data"]:
            group_json = json.loads(group)
            for role_shortname in group_json["roles"]:
                role = await redis_services.get_doc_by_id(
                    redis_services.generate_doc_id(
                        space_name=settings.management_space,
                        schema_shortname="meta",
                        shortname=role_shortname,
                        subpath="roles"
                    )
                )
                if role:
                    roles.append(role)

        return roles
async def drop_index(self, space_name):
|
|
1736
|
+
async with RedisServices() as redis_services:
|
|
1737
|
+
x = await redis_services.list_indices()
|
|
1738
|
+
if x:
|
|
1739
|
+
indices: list[str] = x
|
|
1740
|
+
for index in indices:
|
|
1741
|
+
if index.startswith(f"{space_name}:"):
|
|
1742
|
+
await redis_services.drop_index(index, True)
|
|
1743
|
+
|
|
1744
|
+
async def initialize_spaces(self) -> None:
|
|
1745
|
+
if not settings.spaces_folder.is_dir():
|
|
1746
|
+
raise NotADirectoryError(
|
|
1747
|
+
f"{settings.spaces_folder} directory does not exist!"
|
|
1748
|
+
)
|
|
1749
|
+
|
|
1750
|
+
spaces: dict[str, str] = {}
|
|
1751
|
+
for one in settings.spaces_folder.glob("*/.dm/meta.space.json"):
|
|
1752
|
+
match = SPACES_PATTERN.search(str(one))
|
|
1753
|
+
if not match:
|
|
1754
|
+
continue
|
|
1755
|
+
space_name = match.group(1)
|
|
1756
|
+
|
|
1757
|
+
space_obj = core.Space.model_validate_json(one.read_text())
|
|
1758
|
+
spaces[space_name] = space_obj.model_dump_json()
|
|
1759
|
+
|
|
1760
|
+
async with RedisServices() as redis_services:
|
|
1761
|
+
await redis_services.save_doc("spaces", spaces)
|
|
1762
|
+
|
|
1763
|
+
async def create_user_premission_index(self) -> None:
|
|
1764
|
+
async with RedisServices() as redis_services:
|
|
1765
|
+
try:
|
|
1766
|
+
# Check if index already exist
|
|
1767
|
+
await redis_services.ft("user_permission").info()
|
|
1768
|
+
except Exception:
|
|
1769
|
+
await redis_services.ft("user_permission").create_index(
|
|
1770
|
+
fields=[TextField("name")], # type: ignore
|
|
1771
|
+
definition=IndexDefinition(
|
|
1772
|
+
prefix=["users_permissions"],
|
|
1773
|
+
index_type=IndexType.JSON,
|
|
1774
|
+
)
|
|
1775
|
+
)
|
|
1776
|
+
|
|
1777
|
+
async def store_modules_to_redis(self, roles, groups, permissions) -> None:
|
|
1778
|
+
modules = [
|
|
1779
|
+
{"subpath": "roles", "value": roles},
|
|
1780
|
+
{"subpath": "groups", "value": groups},
|
|
1781
|
+
{"subpath": "permissions", "value": permissions},
|
|
1782
|
+
]
|
|
1783
|
+
async with RedisServices() as redis_services:
|
|
1784
|
+
for module in modules:
|
|
1785
|
+
for _, object in module['value'].items():
|
|
1786
|
+
await redis_services.save_meta_doc(
|
|
1787
|
+
space_name=settings.management_space,
|
|
1788
|
+
subpath=module['subpath'],
|
|
1789
|
+
meta=object,
|
|
1790
|
+
)
|
|
1791
|
+
|
|
1792
|
+
async def delete_user_permissions_map_in_redis(self) -> None:
|
|
1793
|
+
async with RedisServices() as redis_services:
|
|
1794
|
+
search_query = Query("*").no_content()
|
|
1795
|
+
redis_res = await redis_services.ft("user_permission").search(search_query) # type: ignore
|
|
1796
|
+
if redis_res and isinstance(redis_res, dict) and "results" in redis_res:
|
|
1797
|
+
results = redis_res["results"]
|
|
1798
|
+
keys = [doc["id"] for doc in results]
|
|
1799
|
+
if len(keys) > 0:
|
|
1800
|
+
await redis_services.del_keys(keys)
|
|
1801
|
+
|
|
1802
|
+
async def internal_save_model(
|
|
1803
|
+
self,
|
|
1804
|
+
space_name: str,
|
|
1805
|
+
subpath: str,
|
|
1806
|
+
meta: core.Meta,
|
|
1807
|
+
payload: dict | None = None
|
|
1808
|
+
):
|
|
1809
|
+
await self.save(
|
|
1810
|
+
space_name=space_name,
|
|
1811
|
+
subpath=subpath,
|
|
1812
|
+
meta=meta,
|
|
1813
|
+
)
|
|
1814
|
+
|
|
1815
|
+
async with RedisServices() as redis:
|
|
1816
|
+
await redis.save_meta_doc(
|
|
1817
|
+
space_name,
|
|
1818
|
+
subpath,
|
|
1819
|
+
meta,
|
|
1820
|
+
)
|
|
1821
|
+
|
|
1822
|
+
if payload:
|
|
1823
|
+
await self.save_payload_from_json(
|
|
1824
|
+
space_name=space_name,
|
|
1825
|
+
subpath=subpath,
|
|
1826
|
+
meta=meta,
|
|
1827
|
+
payload_data=payload,
|
|
1828
|
+
)
|
|
1829
|
+
payload.update(json.loads(meta.model_dump_json(exclude_none=True, warnings="error")))
|
|
1830
|
+
await redis.save_payload_doc(
|
|
1831
|
+
space_name,
|
|
1832
|
+
subpath,
|
|
1833
|
+
meta,
|
|
1834
|
+
payload,
|
|
1835
|
+
ResourceType(snake_case(type(meta).__name__))
|
|
1836
|
+
)
|
|
1837
|
+
|
|
1838
|
+
async def internal_sys_update_model(
|
|
1839
|
+
self,
|
|
1840
|
+
space_name: str,
|
|
1841
|
+
subpath: str,
|
|
1842
|
+
meta: core.Meta,
|
|
1843
|
+
updates: dict,
|
|
1844
|
+
sync_redis: bool = True,
|
|
1845
|
+
payload_dict: dict[str, Any] = {},
|
|
1846
|
+
):
|
|
1847
|
+
meta.updated_at = datetime.now()
|
|
1848
|
+
meta_updated = False
|
|
1849
|
+
payload_updated = False
|
|
1850
|
+
|
|
1851
|
+
if not payload_dict:
|
|
1852
|
+
try:
|
|
1853
|
+
body = str(meta.payload.body) if meta and meta.payload else ""
|
|
1854
|
+
mydict = await self.load_resource_payload(
|
|
1855
|
+
space_name, subpath, body, core.Content
|
|
1856
|
+
)
|
|
1857
|
+
payload_dict = mydict if mydict else {}
|
|
1858
|
+
except Exception:
|
|
1859
|
+
pass
|
|
1860
|
+
|
|
1861
|
+
restricted_fields = [
|
|
1862
|
+
"uuid",
|
|
1863
|
+
"shortname",
|
|
1864
|
+
"created_at",
|
|
1865
|
+
"updated_at",
|
|
1866
|
+
"owner_shortname",
|
|
1867
|
+
"payload",
|
|
1868
|
+
]
|
|
1869
|
+
old_version_flattend = {**meta.model_dump()}
|
|
1870
|
+
for key, value in updates.items():
|
|
1871
|
+
if key in restricted_fields:
|
|
1872
|
+
continue
|
|
1873
|
+
|
|
1874
|
+
if key in meta.model_fields.keys():
|
|
1875
|
+
meta_updated = True
|
|
1876
|
+
meta.__setattr__(key, value)
|
|
1877
|
+
elif payload_dict:
|
|
1878
|
+
payload_dict[key] = value
|
|
1879
|
+
payload_updated = True
|
|
1880
|
+
|
|
1881
|
+
if meta_updated:
|
|
1882
|
+
await self.update(
|
|
1883
|
+
space_name,
|
|
1884
|
+
subpath,
|
|
1885
|
+
meta,
|
|
1886
|
+
old_version_flattend,
|
|
1887
|
+
{**meta.model_dump()},
|
|
1888
|
+
list(updates.keys()),
|
|
1889
|
+
meta.shortname
|
|
1890
|
+
)
|
|
1891
|
+
if payload_updated and meta.payload and meta.payload.schema_shortname:
|
|
1892
|
+
await self.validate_payload_with_schema(
|
|
1893
|
+
payload_dict, space_name, meta.payload.schema_shortname
|
|
1894
|
+
)
|
|
1895
|
+
await self.save_payload_from_json(
|
|
1896
|
+
space_name, subpath, meta, payload_dict
|
|
1897
|
+
)
|
|
1898
|
+
|
|
1899
|
+
if not sync_redis:
|
|
1900
|
+
return
|
|
1901
|
+
|
|
1902
|
+
async with RedisServices() as redis_services:
|
|
1903
|
+
await redis_services.save_meta_doc(space_name, subpath, meta)
|
|
1904
|
+
if payload_updated:
|
|
1905
|
+
payload_dict.update(json.loads(meta.model_dump_json(exclude_none=True, warnings="error")))
|
|
1906
|
+
await redis_services.save_payload_doc(
|
|
1907
|
+
space_name,
|
|
1908
|
+
subpath,
|
|
1909
|
+
meta,
|
|
1910
|
+
payload_dict,
|
|
1911
|
+
ResourceType(snake_case(type(meta).__name__)),
|
|
1912
|
+
)
|
|
1913
|
+
|
|
1914
|
+
|
|
1915
|
+
    async def get_entry_by_var(
        self,
        key: str,
        val: str,
        logged_in_user,
        retrieve_json_payload: bool = False,
        retrieve_attachments: bool = False,
        retrieve_lock_status: bool = False,
    ) -> core.Record | None:
        """Find one entry across all indexed spaces by a field prefix match.

        Scans each space (skipping those with indexing disabled) for the
        first document matching ``@key:val*``, enforces view access for the
        logged-in user, runs the before/after view plugin hooks, and returns
        the entry as a record.

        Raises:
            api.Exception: 400 when no matching entry is found;
                401 when the user lacks view permission on it.
        """
        spaces = await self.get_spaces()
        entry_doc = None
        entry_space = None
        async with RedisServices() as redis_services:
            # First match wins: spaces are scanned in dict order and the
            # search is capped at limit=1 per space.
            for space_name, space in spaces.items():
                space = json.loads(space)
                if not space['indexing_enabled']:
                    continue
                search_res = await redis_services.search(
                    space_name=space_name,
                    search=f"@{key}:{val}*",
                    limit=1,
                    offset=0,
                    filters={},
                )
                if search_res["total"] > 0:
                    entry_doc = json.loads(search_res["data"][0])
                    entry_space = space_name
                    break

        if not entry_doc or not entry_space:
            raise api.Exception(
                status.HTTP_400_BAD_REQUEST,
                error=api.Error(
                    type="media", code=InternalErrorCode.OBJECT_NOT_FOUND, message="Request object is not available"
                ),
            )

        # Deferred import — presumably avoids a circular import at module
        # load time; confirm before hoisting to the top of the file.
        from utils.access_control import access_control
        if not await access_control.check_access(
            user_shortname=logged_in_user,
            space_name=entry_space,
            subpath=entry_doc["subpath"],
            resource_type=entry_doc["resource_type"],
            action_type=core.ActionType.view,
            resource_is_active=entry_doc["is_active"],
            resource_owner_shortname=entry_doc.get("owner_shortname"),
            resource_owner_group=entry_doc.get("owner_group_shortname"),
            entry_shortname=entry_doc.get("shortname")
        ):
            raise api.Exception(
                status.HTTP_401_UNAUTHORIZED,
                api.Error(
                    type="request",
                    code=InternalErrorCode.NOT_ALLOWED,
                    message="You don't have permission to this action [12]",
                ),
            )

        await plugin_manager.before_action(
            core.Event(
                space_name=entry_space,
                subpath=entry_doc["subpath"],
                shortname=entry_doc["shortname"],
                action_type=core.ActionType.view,
                resource_type=entry_doc["resource_type"],
                user_shortname=logged_in_user,
            )
        )

        resource_base_record = await get_record_from_redis_doc(
            self,
            space_name=entry_space,
            doc=entry_doc,
            retrieve_json_payload=retrieve_json_payload,
            retrieve_attachments=retrieve_attachments,
            validate_schema=True,
            retrieve_lock_status=retrieve_lock_status,
        )

        await plugin_manager.after_action(
            core.Event(
                space_name=entry_space,
                subpath=entry_doc["subpath"],
                shortname=entry_doc["shortname"],
                action_type=core.ActionType.view,
                resource_type=entry_doc["resource_type"],
                user_shortname=logged_in_user,
            )
        )

        return resource_base_record
|
|
2006
|
+
|
|
2007
|
+
async def delete_space(self, space_name, record, owner_shortname):
|
|
2008
|
+
os.system(f"rm -r {settings.spaces_folder}/{space_name}")
|
|
2009
|
+
|
|
2010
|
+
    async def get_last_updated_entry(
        self,
        space_name: str,
        schema_names: list,
        retrieve_json_payload: bool,
        logged_in_user: str,
    ):
        """Unimplemented stub: always does nothing and returns None.

        NOTE(review): kept as a no-op placeholder — likely part of an
        adapter interface implemented elsewhere; confirm before relying
        on it.
        """
        pass
|
|
2018
|
+
|
|
2019
|
+
async def get_group_users(self, group_name: str):
|
|
2020
|
+
async with RedisServices() as redis_services:
|
|
2021
|
+
users_docs = await redis_services.search(
|
|
2022
|
+
space_name=settings.management_space,
|
|
2023
|
+
schema_name="meta",
|
|
2024
|
+
filters={"subpath": ["users"]},
|
|
2025
|
+
limit=10000,
|
|
2026
|
+
offset=0,
|
|
2027
|
+
search=f"@groups:{{{group_name}}}",
|
|
2028
|
+
)
|
|
2029
|
+
|
|
2030
|
+
if users_docs:
|
|
2031
|
+
return users_docs["data"]
|
|
2032
|
+
|
|
2033
|
+
return []
|
|
2034
|
+
|
|
2035
|
+
async def is_user_verified(self, user_shortname: str | None, identifier: str | None) -> bool:
|
|
2036
|
+
async with RedisServices() as redis_services:
|
|
2037
|
+
user: dict = await redis_services.get_doc_by_id(f"management:meta:users/{user_shortname}")
|
|
2038
|
+
if user:
|
|
2039
|
+
if identifier == "msisdn":
|
|
2040
|
+
return bool(user.get("is_msisdn_verified", True))
|
|
2041
|
+
if identifier == "email":
|
|
2042
|
+
return bool(user.get("is_email_verified", True))
|
|
2043
|
+
return False
|