dmart-1.4.17-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alembic.ini +117 -0
- api/__init__.py +0 -0
- api/info/__init__.py +0 -0
- api/info/router.py +109 -0
- api/managed/__init__.py +0 -0
- api/managed/router.py +1541 -0
- api/managed/utils.py +1879 -0
- api/public/__init__.py +0 -0
- api/public/router.py +758 -0
- api/qr/__init__.py +0 -0
- api/qr/router.py +108 -0
- api/user/__init__.py +0 -0
- api/user/model/__init__.py +0 -0
- api/user/model/errors.py +14 -0
- api/user/model/requests.py +165 -0
- api/user/model/responses.py +11 -0
- api/user/router.py +1413 -0
- api/user/service.py +270 -0
- bundler.py +55 -0
- config/__init__.py +0 -0
- config/channels.json +11 -0
- config/notification.json +17 -0
- cxb/__init__.py +0 -0
- cxb/client/__init__.py +0 -0
- cxb/client/assets/@codemirror-Rn7_6DkE.js +10 -0
- cxb/client/assets/@edraj-CS4NwVbD.js +1 -0
- cxb/client/assets/@floating-ui-BwwcF-xh.js +1 -0
- cxb/client/assets/@formatjs-yKEsAtjs.js +1 -0
- cxb/client/assets/@fortawesome-DRW1UCdr.js +9 -0
- cxb/client/assets/@jsonquerylang-laKNoFFq.js +12 -0
- cxb/client/assets/@lezer-za4Q-8Ew.js +1 -0
- cxb/client/assets/@marijn-DXwl3gUT.js +1 -0
- cxb/client/assets/@popperjs-l0sNRNKZ.js +1 -0
- cxb/client/assets/@replit--ERk53eB.js +1 -0
- cxb/client/assets/@roxi-CGMFK4i8.js +6 -0
- cxb/client/assets/@typewriter-cCzskkIv.js +17 -0
- cxb/client/assets/@zerodevx-BlBZjKxu.js +1 -0
- cxb/client/assets/@zerodevx-CVEpe6WZ.css +1 -0
- cxb/client/assets/BreadCrumbLite-DAhOx38v.js +1 -0
- cxb/client/assets/EntryRenderer-25YDhRen.js +32 -0
- cxb/client/assets/EntryRenderer-DXytdFp9.css +1 -0
- cxb/client/assets/ListView-BpAycA2h.js +16 -0
- cxb/client/assets/ListView-U8of-_c-.css +1 -0
- cxb/client/assets/Prism--hMplq-p.js +3 -0
- cxb/client/assets/Prism-Uh6uStUw.css +1 -0
- cxb/client/assets/Table2Cols-BsbwicQm.js +1 -0
- cxb/client/assets/_..-BvT6vdHa.css +1 -0
- cxb/client/assets/_...404_-fuLH_rX9.js +2 -0
- cxb/client/assets/_...fallback_-Ba_NLmAE.js +1 -0
- cxb/client/assets/_module-Bfk8MiCs.js +3 -0
- cxb/client/assets/_module-CEW0D5oI.js +4 -0
- cxb/client/assets/_module-Dgq0ZVtz.js +1 -0
- cxb/client/assets/ajv-Cpj98o6Y.js +1 -0
- cxb/client/assets/axios-CG2WSiiR.js +6 -0
- cxb/client/assets/clsx-B-dksMZM.js +1 -0
- cxb/client/assets/codemirror-wrapped-line-indent-DPhKvljI.js +1 -0
- cxb/client/assets/compare-C3AjiGFR.js +1 -0
- cxb/client/assets/compute-scroll-into-view-Bl8rNFhg.js +1 -0
- cxb/client/assets/consolite-DlCuI0F9.js +1 -0
- cxb/client/assets/crelt-C8TCjufn.js +1 -0
- cxb/client/assets/date-fns-l0sNRNKZ.js +1 -0
- cxb/client/assets/deepmerge-rn4rBaHU.js +1 -0
- cxb/client/assets/dmart_services-AL6-IdDE.js +1 -0
- cxb/client/assets/downloadFile-D08i0YDh.js +1 -0
- cxb/client/assets/easy-signal-BiPFIK3O.js +1 -0
- cxb/client/assets/esm-env-rsSWfq8L.js +1 -0
- cxb/client/assets/export-OF_rTiXu.js +1 -0
- cxb/client/assets/fast-deep-equal-l0sNRNKZ.js +1 -0
- cxb/client/assets/fast-diff-C-IidNf4.js +1 -0
- cxb/client/assets/fast-uri-l0sNRNKZ.js +1 -0
- cxb/client/assets/flowbite-svelte-BLvjb-sa.js +1 -0
- cxb/client/assets/flowbite-svelte-CD54FDqW.css +1 -0
- cxb/client/assets/flowbite-svelte-icons-BI8GVhw_.js +1 -0
- cxb/client/assets/github-slugger-CQ4oX9Ud.js +1 -0
- cxb/client/assets/global-igKv-1g9.js +1 -0
- cxb/client/assets/hookar-BMRD9G9H.js +1 -0
- cxb/client/assets/immutable-json-patch-DtRO2E_S.js +1 -0
- cxb/client/assets/import-1vE3gBat.js +1 -0
- cxb/client/assets/index-B-eTh-ZX.js +1 -0
- cxb/client/assets/index-BVyxzKtH.js +1 -0
- cxb/client/assets/index-BdeNM69f.js +1 -0
- cxb/client/assets/index-C6cPO4op.js +1 -0
- cxb/client/assets/index-CC-A1ipE.js +1 -0
- cxb/client/assets/index-CTxJ-lDp.js +1 -0
- cxb/client/assets/index-Cd-F5j_k.js +1 -0
- cxb/client/assets/index-D742rwaM.js +1 -0
- cxb/client/assets/index-DTfhnhwd.js +1 -0
- cxb/client/assets/index-DdXRK7n9.js +2 -0
- cxb/client/assets/index-DtiCmB4o.js +1 -0
- cxb/client/assets/index-NBrXBlLA.css +2 -0
- cxb/client/assets/index-ac-Buu_H.js +4 -0
- cxb/client/assets/index-iYkH7C67.js +1 -0
- cxb/client/assets/info-B986lRiM.js +1 -0
- cxb/client/assets/intl-messageformat-Dc5UU-HB.js +3 -0
- cxb/client/assets/jmespath-l0sNRNKZ.js +1 -0
- cxb/client/assets/json-schema-traverse-l0sNRNKZ.js +1 -0
- cxb/client/assets/json-source-map-DRgZidqy.js +5 -0
- cxb/client/assets/jsonpath-plus-l0sNRNKZ.js +1 -0
- cxb/client/assets/jsonrepair-B30Dx381.js +8 -0
- cxb/client/assets/lodash-es-DZVAA2ox.js +1 -0
- cxb/client/assets/marked-DKjyhwJX.js +56 -0
- cxb/client/assets/marked-gfm-heading-id-U5zO829x.js +2 -0
- cxb/client/assets/marked-mangle-CDMeiHC6.js +1 -0
- cxb/client/assets/memoize-one-BdPwpGay.js +1 -0
- cxb/client/assets/natural-compare-lite-Bg2Xcf-o.js +7 -0
- cxb/client/assets/pagination-svelte-D5CyoiE_.js +13 -0
- cxb/client/assets/pagination-svelte-v10nAbbM.css +1 -0
- cxb/client/assets/plantuml-encoder-C47mzt9T.js +1 -0
- cxb/client/assets/prismjs-DTUiLGJu.js +9 -0
- cxb/client/assets/profile-BUf-tKMe.js +1 -0
- cxb/client/assets/query-CNmXTsgf.js +1 -0
- cxb/client/assets/queryHelpers-C9iBWwqe.js +1 -0
- cxb/client/assets/scroll-into-view-if-needed-KR58zyjF.js +1 -0
- cxb/client/assets/spaces-0oyGvpii.js +1 -0
- cxb/client/assets/style-mod-Bs6eFhZE.js +3 -0
- cxb/client/assets/svelte-B2XmcTi_.js +4 -0
- cxb/client/assets/svelte-awesome-COLlx0DN.css +1 -0
- cxb/client/assets/svelte-awesome-DhnMA6Q_.js +1 -0
- cxb/client/assets/svelte-datatables-net-CY7LBj6I.js +1 -0
- cxb/client/assets/svelte-floating-ui-BlS3sOAQ.js +1 -0
- cxb/client/assets/svelte-i18n-CT2KkQaN.js +3 -0
- cxb/client/assets/svelte-jsoneditor-BzfX6Usi.css +1 -0
- cxb/client/assets/svelte-jsoneditor-CUGSvWId.js +25 -0
- cxb/client/assets/svelte-select-CegQKzqH.css +1 -0
- cxb/client/assets/svelte-select-CjHAt_85.js +6 -0
- cxb/client/assets/tailwind-merge-CJvxXMcu.js +1 -0
- cxb/client/assets/tailwind-variants-Cj20BoQ3.js +1 -0
- cxb/client/assets/toast-B9WDyfyI.js +1 -0
- cxb/client/assets/tslib-pJfR_DrR.js +1 -0
- cxb/client/assets/typewriter-editor-DkTVIJdm.js +25 -0
- cxb/client/assets/user-DeK_NB5v.js +1 -0
- cxb/client/assets/vanilla-picker-l5rcX3cq.js +8 -0
- cxb/client/assets/w3c-keyname-Vcq4gwWv.js +1 -0
- cxb/client/config.json +11 -0
- cxb/client/config.sample.json +11 -0
- cxb/client/favicon.ico +0 -0
- cxb/client/favicon.png +0 -0
- cxb/client/index.html +28 -0
- data_adapters/__init__.py +0 -0
- data_adapters/adapter.py +16 -0
- data_adapters/base_data_adapter.py +467 -0
- data_adapters/file/__init__.py +0 -0
- data_adapters/file/adapter.py +2043 -0
- data_adapters/file/adapter_helpers.py +1013 -0
- data_adapters/file/archive.py +150 -0
- data_adapters/file/create_index.py +331 -0
- data_adapters/file/create_users_folders.py +52 -0
- data_adapters/file/custom_validations.py +68 -0
- data_adapters/file/drop_index.py +40 -0
- data_adapters/file/health_check.py +560 -0
- data_adapters/file/redis_services.py +1110 -0
- data_adapters/helpers.py +27 -0
- data_adapters/sql/__init__.py +0 -0
- data_adapters/sql/adapter.py +3218 -0
- data_adapters/sql/adapter_helpers.py +491 -0
- data_adapters/sql/create_tables.py +451 -0
- data_adapters/sql/create_users_folders.py +53 -0
- data_adapters/sql/db_to_json_migration.py +485 -0
- data_adapters/sql/health_check_sql.py +232 -0
- data_adapters/sql/json_to_db_migration.py +454 -0
- data_adapters/sql/update_query_policies.py +101 -0
- data_generator.py +81 -0
- dmart-1.4.17.dist-info/METADATA +65 -0
- dmart-1.4.17.dist-info/RECORD +289 -0
- dmart-1.4.17.dist-info/WHEEL +5 -0
- dmart-1.4.17.dist-info/entry_points.txt +2 -0
- dmart-1.4.17.dist-info/top_level.txt +24 -0
- dmart.py +623 -0
- dmart_migrations/README +1 -0
- dmart_migrations/__init__.py +0 -0
- dmart_migrations/__pycache__/__init__.cpython-314.pyc +0 -0
- dmart_migrations/__pycache__/env.cpython-314.pyc +0 -0
- dmart_migrations/env.py +100 -0
- dmart_migrations/notes.txt +11 -0
- dmart_migrations/script.py.mako +28 -0
- dmart_migrations/scripts/__init__.py +0 -0
- dmart_migrations/scripts/calculate_checksums.py +77 -0
- dmart_migrations/scripts/migration_f7a4949eed19.py +28 -0
- dmart_migrations/versions/0f3d2b1a7c21_add_authz_materialized_views.py +87 -0
- dmart_migrations/versions/10d2041b94d4_last_checksum_history.py +62 -0
- dmart_migrations/versions/1cf4e1ee3cb8_ext_permission_with_filter_fields_values.py +33 -0
- dmart_migrations/versions/26bfe19b49d4_rm_failedloginattempts.py +42 -0
- dmart_migrations/versions/3c8bca2219cc_add_otp_table.py +38 -0
- dmart_migrations/versions/6675fd9dfe42_remove_unique_from_sessions_table.py +36 -0
- dmart_migrations/versions/71bc1df82e6a_adding_user_last_login_at.py +43 -0
- dmart_migrations/versions/74288ccbd3b5_initial.py +264 -0
- dmart_migrations/versions/7520a89a8467_rm_activesession_table.py +39 -0
- dmart_migrations/versions/848b623755a4_make_created_nd_updated_at_required.py +138 -0
- dmart_migrations/versions/8640dcbebf85_add_notes_to_users.py +32 -0
- dmart_migrations/versions/91c94250232a_adding_fk_on_owner_shortname.py +104 -0
- dmart_migrations/versions/98ecd6f56f9a_ext_meta_with_owner_group_shortname.py +66 -0
- dmart_migrations/versions/9aae9138c4ef_indexing_created_at_updated_at.py +80 -0
- dmart_migrations/versions/__init__.py +0 -0
- dmart_migrations/versions/__pycache__/0f3d2b1a7c21_add_authz_materialized_views.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/10d2041b94d4_last_checksum_history.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/1cf4e1ee3cb8_ext_permission_with_filter_fields_values.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/26bfe19b49d4_rm_failedloginattempts.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/3c8bca2219cc_add_otp_table.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/6675fd9dfe42_remove_unique_from_sessions_table.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/71bc1df82e6a_adding_user_last_login_at.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/74288ccbd3b5_initial.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/7520a89a8467_rm_activesession_table.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/848b623755a4_make_created_nd_updated_at_required.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/8640dcbebf85_add_notes_to_users.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/91c94250232a_adding_fk_on_owner_shortname.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/98ecd6f56f9a_ext_meta_with_owner_group_shortname.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/9aae9138c4ef_indexing_created_at_updated_at.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/b53f916b3f6d_json_to_jsonb.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/eb5f1ec65156_adding_user_locked_to_device.cpython-314.pyc +0 -0
- dmart_migrations/versions/__pycache__/f7a4949eed19_adding_query_policies_to_meta.cpython-314.pyc +0 -0
- dmart_migrations/versions/b53f916b3f6d_json_to_jsonb.py +492 -0
- dmart_migrations/versions/eb5f1ec65156_adding_user_locked_to_device.py +36 -0
- dmart_migrations/versions/f7a4949eed19_adding_query_policies_to_meta.py +60 -0
- get_settings.py +7 -0
- info.json +1 -0
- languages/__init__.py +0 -0
- languages/arabic.json +15 -0
- languages/english.json +16 -0
- languages/kurdish.json +14 -0
- languages/loader.py +12 -0
- main.py +560 -0
- migrate.py +24 -0
- models/__init__.py +0 -0
- models/api.py +203 -0
- models/core.py +597 -0
- models/enums.py +255 -0
- password_gen.py +8 -0
- plugins/__init__.py +0 -0
- plugins/action_log/__init__.py +0 -0
- plugins/action_log/plugin.py +121 -0
- plugins/admin_notification_sender/__init__.py +0 -0
- plugins/admin_notification_sender/plugin.py +124 -0
- plugins/ldap_manager/__init__.py +0 -0
- plugins/ldap_manager/plugin.py +100 -0
- plugins/local_notification/__init__.py +0 -0
- plugins/local_notification/plugin.py +123 -0
- plugins/realtime_updates_notifier/__init__.py +0 -0
- plugins/realtime_updates_notifier/plugin.py +58 -0
- plugins/redis_db_update/__init__.py +0 -0
- plugins/redis_db_update/plugin.py +188 -0
- plugins/resource_folders_creation/__init__.py +0 -0
- plugins/resource_folders_creation/plugin.py +81 -0
- plugins/system_notification_sender/__init__.py +0 -0
- plugins/system_notification_sender/plugin.py +188 -0
- plugins/update_access_controls/__init__.py +0 -0
- plugins/update_access_controls/plugin.py +9 -0
- pytests/__init__.py +0 -0
- pytests/api_user_models_erros_test.py +16 -0
- pytests/api_user_models_requests_test.py +98 -0
- pytests/archive_test.py +72 -0
- pytests/base_test.py +300 -0
- pytests/get_settings_test.py +14 -0
- pytests/json_to_db_migration_test.py +237 -0
- pytests/service_test.py +26 -0
- pytests/test_info.py +55 -0
- pytests/test_status.py +15 -0
- run_notification_campaign.py +85 -0
- scheduled_notification_handler.py +121 -0
- schema_migration.py +208 -0
- schema_modulate.py +192 -0
- set_admin_passwd.py +55 -0
- sync.py +202 -0
- utils/__init__.py +0 -0
- utils/access_control.py +306 -0
- utils/async_request.py +8 -0
- utils/exporter.py +309 -0
- utils/firebase_notifier.py +57 -0
- utils/generate_email.py +37 -0
- utils/helpers.py +352 -0
- utils/hypercorn_config.py +12 -0
- utils/internal_error_code.py +60 -0
- utils/jwt.py +124 -0
- utils/logger.py +167 -0
- utils/middleware.py +99 -0
- utils/notification.py +75 -0
- utils/password_hashing.py +16 -0
- utils/plugin_manager.py +202 -0
- utils/query_policies_helper.py +128 -0
- utils/regex.py +44 -0
- utils/repository.py +529 -0
- utils/router_helper.py +19 -0
- utils/settings.py +166 -0
- utils/sms_notifier.py +21 -0
- utils/social_sso.py +67 -0
- utils/templates/activation.html.j2 +26 -0
- utils/templates/reminder.html.j2 +17 -0
- utils/ticket_sys_utils.py +203 -0
- utils/web_notifier.py +29 -0
- websocket.py +231 -0
schema_migration.py
ADDED
@@ -0,0 +1,208 @@
import argparse
import asyncio
from enum import Enum
import re
import sys
from models.enums import ContentType
from utils import helpers
from data_adapters.adapter import data_adapter as db
from models.core import Meta, Schema
from utils.settings import settings


class FieldType(Enum):
    string = "string"
    number = "number"
    integer = "integer"
    array = "array"


FIELD_TYPE_PARSER: dict = {
    "string": str,
    "number": int,
    "integer": int,
    "array": list
}


async def change_field_type(
    space: str,
    schema_model: Schema,
    schema_payload: dict,
    field: str,
    new_type: FieldType
):
    # 3-update field type to new_type
    schema_properties = schema_payload["properties"]
    await db.internal_sys_update_model(
        space_name=space,
        subpath="schema",
        meta=schema_model,
        payload_dict=schema_payload,
        updates={
            "properties": schema_properties
        },
    )

    updated_num = 0

    # 4-loop over space by glob "*/.dm/*/meta.*.json"
    path = settings.spaces_folder / space
    entries_glob = "*/.dm/*/meta.*.json"
    FILE_PATTERN = re.compile("(\\w*)\\/\\.dm\\/(\\w*)\\/meta\\.([a-zA-Z]*)\\.json$")
    for one in path.glob(entries_glob):
        match = FILE_PATTERN.search(str(one))
        if not match or not one.is_file():
            continue
        subpath = match.group(1)
        shortname = match.group(2)
        resource_type = match.group(3)
        resource_cls = getattr(
            sys.modules["models.core"], helpers.camel_case(resource_type)
        )
        try:
            resource_obj: Meta = await db.load(
                space_name=space,
                subpath=subpath,
                shortname=shortname,
                class_type=resource_cls
            )

            # 5-if resource.schema_shortname == schema:
            if(
                not resource_obj.payload or
                not isinstance(resource_obj.payload.body, str) or
                resource_obj.payload.schema_shortname != schema_model.shortname
            ):
                continue

            # 5.1-load payload file
            resource_payload = await db.load_resource_payload(
                space_name=space,
                subpath=subpath,
                filename=resource_obj.payload.body,
                class_type=resource_cls
            )
        except Exception as ex:
            print(f"Error loading {one}", ex)
            continue

        resource_payload_keys = helpers.flatten_dict(resource_payload)
        if field not in resource_payload_keys or not resource_payload:
            continue

        # 5.2-parse field's old_type to new_type
        field_tree = field.split(".")
        last_idx = len(field_tree)-1
        main_field = resource_payload[field_tree[0]]
        field_to_update = main_field
        for i in range(1, last_idx):
            field_to_update = main_field[field_tree[i]] #type: ignore
        field_to_update[field_tree[last_idx]] = FIELD_TYPE_PARSER[new_type](field_to_update[field_tree[last_idx]])#type: ignore
        await db.internal_sys_update_model(
            space_name=space,
            subpath=subpath,
            meta=resource_obj,
            payload_dict=resource_payload,
            updates={
                field_tree[0]: main_field
            },
        )
        updated_num += 1

    return updated_num


async def main(
    space: str,
    schema: str,
    field: str,
    old_type: FieldType,
    new_type: FieldType
):
    """
    Algorithm:
    1-load schema
    2-make sure field with old_type exist
    3-update field type to new_type
    4-loop over space by glob "*/.dm/*/meta.*.json"
    5-if resource.schema_shortname == schema:
        5.1-load payload file
        5.2-parse field's old_type to new_type
    """

    # 1-load schema
    schema_model: Schema = Schema.model_validate(
        await db.load(
            space_name=space,
            subpath="schema",
            shortname=schema,
            class_type=Schema
        )
    )
    if(
        not schema_model.payload or
        schema_model.payload.content_type != ContentType.json or
        not isinstance(schema_model.payload.body, str)
    ):
        print(f"Invalid schema file: \n{schema_model.model_dump_json()}")
        return
    schema_payload = await db.load_resource_payload(
        space_name=space,
        subpath="schema",
        filename=schema_model.payload.body,
        class_type=Schema
    )

    if not schema_payload:
        return

    # 2-make sure field with old_type exist
    field_tree = field.split(".")
    field_definition = schema_payload
    for f in field_tree:
        field_definition = field_definition["properties"].get(f, {})

    if field_definition.get("type") != old_type:
        print("Invalid old type for the specified field under the specified schema")
        return

    field_definition["type"] = new_type

    updated_num = await change_field_type(
        space,
        schema_model,
        schema_payload,
        field,
        new_type
    )

    print(f"Successfully updated the schema along with {updated_num} resource files")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Change field type in a specific Schema under a specific Space",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument("-p", "--space", help="Space name")
    parser.add_argument("-c", "--schema", help="Schema name")
    parser.add_argument("-f", "--field", help="Field name to change")
    parser.add_argument(
        "--old-type",
        help="Field's old type, supported types: string, number, integer, array"
    )
    parser.add_argument(
        "--new-type",
        help="Field's new type, supported types: string, number, integer, array"
    )

    args = parser.parse_args()

    asyncio.run(main(
        args.space,
        args.schema,
        args.field,
        args.old_type,
        args.new_type
    ))
schema_modulate.py
ADDED
@@ -0,0 +1,192 @@
#!/usr/bin/env -S BACKEND_ENV=config.env python3
import asyncio
from enum import StrEnum

from sqlalchemy import update
from typing import Any
from data_adapters.sql.adapter import SQLAdapter
from data_adapters.sql.create_tables import Users, Roles, Permissions, Entries, Spaces, Attachments
from utils.settings import settings
import argparse
from sqlmodel import select, col


'''
--space and --subpath are optional

# add new key to the records
## with default value
./schema_modulate.py --space management --subpath users -t +payload.body.xxx -v 123
## default value is None
./schema_modulate.py --space management --subpath users -t +payload.body.xxx

# remove key from the records
./schema_modulate.py --space management --subpath users -t ~payload.body.xxx

# update key in the records
./schema_modulate.py --space management --subpath users -t payload.body.xxx -v yyy
'''

class ResourceType(StrEnum):
    add = "add"
    remove = "remove"
    update = "update"


async def handle_sql_modulation(args):
    spaces: list[Any] = []
    if args.space:
        if args.space == "management":
            if args.subpath is None:
                spaces = []
            if args.subpath == "~attachments":
                spaces = [Attachments]
            else:
                if args.subpath == "users":
                    spaces = [Users]
                elif args.subpath == "roles":
                    spaces = [Roles]
                elif args.subpath == "permissions":
                    spaces = [Permissions]
                elif args.subpath == "~spaces":
                    spaces = [Permissions]
                else:
                    spaces = [Entries]
        else:
            spaces = [Entries]
            if args.subpath == "~attachments":
                spaces.append(Attachments)
    else:
        spaces = [Entries, Users, Roles, Permissions, Spaces, Attachments]

    targets = args.target.split(".")

    state = ResourceType.update
    if targets[0].startswith("+"):
        print(f"[info] Adding new key '{targets[0]}' to the records")
        state = ResourceType.add
        targets[0] = targets[0][1:]

        if args.value:
            print("[warn] flag -v --value is not required for adding new key")
    if targets[0].startswith("~"):
        print(f"[info] Removing key '{targets[0]}' from the records")
        state = ResourceType.remove
        targets[0] = targets[0][1:]

        if args.value:
            print("[warn] flag -v --value is not required for removing key")
    else:
        print(f"[info] Altering the key '{targets[0]}' fo records")

    if targets[0] not in ["description", "displayname", "payload"]:
        raise Exception("target must be either 'description', 'displayname' or 'payload'")

    async with SQLAdapter().get_session() as session:
        for space in spaces:
            print("[info] Processing...", space)

            statement = select(space)

            if space not in [Users, Roles, Permissions, Spaces, Attachments]:
                statement = statement.where(space.shortname == args.space)
                if args.subpath:
                    statement = statement.where(space.subpath == args.subpath)

            records = (await session.execute(statement)).all()
            records = [record[0] for record in records]
            print("[info] # Records found:", len(records))
            print("[info] state:", state)
            for record in records:
                if hasattr(record, targets[0]):
                    if state == ResourceType.update:
                        obj = getattr(record, targets[0])
                        if obj:
                            keys = targets
                            sub_obj = obj
                            for key in keys[1:-1]:
                                if not bool(sub_obj):
                                    break
                                sub_obj = sub_obj.get(key, {})

                            if not bool(sub_obj):
                                continue

                            if keys[-1] in sub_obj:
                                sub_obj[args.value] = sub_obj.pop(keys[-1])

                    elif state == ResourceType.add:
                        obj = getattr(record, targets[0])
                        if obj is None:
                            setattr(record, targets[0], {})
                        keys = targets
                        sub_obj = obj
                        for key in keys[1:-1]:
                            if not bool(sub_obj):
                                break
                            sub_obj = sub_obj.get(key, {})

                        if not bool(sub_obj):
                            continue

                        if keys[-1] not in sub_obj:
                            sub_obj[keys[-1]] = args.value

                    elif state == ResourceType.remove:
                        obj = getattr(record, targets[0])
                        if obj:
                            keys = targets
                            sub_obj = obj
                            for key in keys[1:-1]:
                                sub_obj = sub_obj.get(key, {})
                                if not bool(sub_obj):
                                    continue
                            if keys[-1] in sub_obj:
                                sub_obj.pop(keys[-1])

                    print(obj)
                    setattr(record, targets[0], obj)

                    stmt = update(space).where(col(space.uuid )== record.uuid).values(
                        **{targets[0]: getattr(record, targets[0])})
                    await session.execute(stmt) #type: ignore
        await session.commit()


def handle_file_modulation(args):
    pass


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Modulate schema field type",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )

    parser.add_argument(
        "--space",
        required=True
    )
    parser.add_argument(
        "--subpath",
        default=None
    )
    parser.add_argument(
        "-t", "--target",
        required=True
    )
    parser.add_argument(
        "-v", "--value",
        default=None
    )

    args = parser.parse_args()

    if settings.active_data_db == "sql":
        asyncio.run(
            handle_sql_modulation(args)
        )
    else:
        handle_file_modulation(args)
set_admin_passwd.py
ADDED
@@ -0,0 +1,55 @@
#!/usr/bin/env -S BACKEND_ENV=config.env python3
import asyncio
import json
import getpass
import subprocess
import utils.regex as regex
import re
from pathlib import Path
from sqlmodel import select

from data_adapters.sql.adapter import SQLAdapter
from data_adapters.sql.create_tables import Users
from utils.password_hashing import hash_password
from utils.settings import settings


users : dict[str, dict]= {"dmart":{}, "alibaba": {}}

while True:
    password = getpass.getpass("Type the new password for admin/testuser then hit enter: ")
    if re.match(regex.PASSWORD, password):
        break
    else:
        print("Password didn't match the rules: >= 8 chars that are Alphanumeric mix cap/small with _#@%*!?$^- ")

print("Generating and storing the password for dmart and alibaba")
hashed = hash_password(password)

async def main():
    if settings.active_data_db == "file":
        for key in users.keys():
            file_name = settings.spaces_folder / f"management/users/.dm/{key}/meta.user.json"
            with open(file_name, 'r') as read_file:
                data = json.load(read_file)
                data["password"] = hashed
            with open(file_name, 'w') as write_file:
                write_file.write(json.dumps(data))
    else:
        async with SQLAdapter().get_session() as session:
            for key in users.keys():
                statement = select(Users).where(Users.shortname == key)
                user = (await session.execute(statement)).one()[0]
                user.password=hashed
                user.is_active=True
                session.add(user)
            await session.commit()


asyncio.run(main())

login_creds_sample = Path(__file__).resolve().parent / "login_creds.sh.sample"
if login_creds_sample.exists():
    with open("./login_creds.sh", 'w') as creds:
        subprocess.run( [ "sed", f"s/xxxx/{password}/g", str(login_creds_sample) ], stdout=creds)
print("Done")
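Per its shebang, set_admin_passwd.py is meant to be executed directly with the backend config loaded, and it prompts interactively for the new password rather than taking arguments; an invocation would simply be:

./set_admin_passwd.py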
sync.py
ADDED
@@ -0,0 +1,202 @@
import argparse
import hashlib
import json
import requests

from models.enums import RequestType
from utils.settings import settings


local_username = "dmart"
local_password = "Test1234"


headers = {
    "accept": "application/json, text/plain, */*",
}
target_headers = {
    **headers,
}
local_headers = {
    **headers,
}

body = {
    "filter_shortnames": [],
    "type": "search",
    "exact_subpath": True,
    "limit": 100,
    "offset": 0,
    "search": "",
    "retrieve_json_payload": True,
}


def login(username, password, target):
    body = {
        "shortname": username,
        "password": password,
    }

    response = requests.post(f"{target}/user/login", headers=headers, json=body,)

    if response.ok:
        return response.json()["records"][0]["attributes"]["access_token"]
    else:
        print(f"Error: {response.status_code}, {response.text}")


def hash_records(local_records, target_records):
    hashed_local_records = {}
    hashed_target_records = {}
    for local_record in local_records:
        sha1 = hashlib.sha1()
        r = {
            "shortname": local_record["shortname"],
            "displayname": local_record.get("attributes", {}).get("displayname", {}),
            "description": local_record.get("attributes", {}).get("description", {}),
        }
        if local_record.get("attributes", {}).get("payload", {}):
            r["payload"] = local_record.get("attributes", {}).get("payload", {}).get("checksum", {})
        else:
            r["payload"] = None
        sha1.update(json.dumps(r).encode())
        checksum = sha1.hexdigest()
        hashed_local_records[local_record["shortname"]] = checksum

    for target_record in target_records:
        sha1 = hashlib.sha1()
        r = {
            "shortname": target_record["shortname"],
            "displayname": target_record.get("attributes", {}).get("displayname", {}),
            "description": target_record.get("attributes", {}).get("description", {}),
        }
        if target_record.get("attributes", {}).get("payload", {}):
            r["payload"] = target_record.get("attributes", {}).get("payload", {}).get("checksum", {})
        else:
            r["payload"] = None
        sha1.update(json.dumps(r).encode())
        checksum = sha1.hexdigest()
        hashed_target_records[target_record["shortname"]] = checksum

    return hashed_local_records, hashed_target_records


local_records = []
target_records = []
def fetch_locators(space, subpath, target):
    global local_records
    global target_records

    body["space_name"] = space
    body["subpath"] = subpath

    response_target = requests.post(f"{target}/managed/query", headers=target_headers, json=body,)
    response_local = requests.post(f"http://{settings.listening_host}:{settings.listening_port}/managed/query", headers=local_headers, json=body,)

    if response_target.ok:
        target_records = response_target.json()["records"]
        print('# target records:', len(target_records))
    else:
        print(f"Error: {response_target.status_code}, {response_target.text}")

    if response_local.ok:
        local_records = response_local.json()["records"]
        print('# local records:', len(local_records))
    else:
        print(f"Error: {response_local.status_code}, {response_local.text}")

    return local_records, target_records


def get_diff(hashed_local_records, hashed_target_records):
    added_records = {k: v for k, v in hashed_local_records.items() if k not in hashed_target_records}
    removed_records = {k: v for k, v in hashed_target_records.items() if k not in hashed_local_records}
    different_records = {k: v for k, v in hashed_local_records.items() if k in hashed_target_records and hashed_target_records[k] != v}

    print(f"Added records: {added_records}")
    print(f"Removed records: {removed_records}")
    print(f"Different records: {different_records}")
    return added_records, removed_records, different_records


def apply_changes(space, target, added_records, removed_records, different_records):
    added_records_shortnames = [record for record in local_records if record["shortname"] in added_records]
    removed_records_shortnames = [record for record in target_records if record["shortname"] in removed_records]
    different_records_shortnames = [record for record in local_records if record["shortname"] in different_records]

    print(f"Added records shortnames: {added_records_shortnames}")
    print(f"Removed records shortnames: {removed_records_shortnames}")
    print(f"Different records shortnames: {different_records_shortnames}")

    request_data = {
        "space_name": space,
        "request_type": RequestType.create,
        "records": added_records_shortnames,
    }
    response = requests.post(f"{target}/managed/request", headers=target_headers, json=request_data,)
    if response.ok:
        print('records:', response.json())
    else:
        print(f"Error: {response.status_code}, {response.text}")

    request_data = {
        "space_name": space,
        "request_type": RequestType.delete,
        "records": removed_records_shortnames,
    }
    response = requests.post(f"{target}/managed/request", headers=target_headers, json=request_data,)
    if response.ok:
        print('records:', response.json())
    else:
        print(f"Error: {response.status_code}, {response.text}")

    request_data = {
        "space_name": space,
        "request_type": RequestType.update,
        "records": different_records_shortnames,
    }
    response = requests.post(f"{target}/managed/request", headers=target_headers, json=request_data,)
    if response.ok:
        print('records:', response.json())
    else:
        print(f"Error: {response.status_code}, {response.text}")


def main():
    parser = argparse.ArgumentParser(description="Process some arguments.")
    parser.add_argument('-u', required=True, help='The username argument')
    parser.add_argument('-p', required=True, help='The password argument')
    parser.add_argument('-sp', required=True, help='The space argument')
    parser.add_argument('-su', required=True, help='The subpath argument')
    parser.add_argument('-t', required=True, help='The target argument')
    parser.add_argument('-l', required=False, help='The limit argument')
    parser.add_argument('-o', required=False, help='The offset argument')

    args = parser.parse_args()

    print(f">Space: {args.sp}")
    print(f">Subpath: {args.su}")
    print(f">Target: {args.t}")
    print(f">Username: {args.u}")

    if args.l:
        body["limit"] = args.l
    if args.o:
        body["offset"] = args.o

    local_token = login(local_username, local_password, f"http://{settings.listening_host}:{settings.listening_port}")
    local_headers["Authorization"] = f"Bearer {local_token}"

    target_token = login(args.u, args.p, args.t)
    target_headers["Authorization"] = f"Bearer {target_token}"

    local_records, target_records = fetch_locators(args.sp, args.su, args.t)
    hashed_local_records, hashed_target_records = hash_records(local_records, target_records)
    added_records, removed_records, different_records = get_diff(hashed_local_records, hashed_target_records)
    apply_changes(args.sp, args.t, added_records, removed_records, different_records)


if __name__ == "__main__":
    main()
utils/__init__.py
ADDED
File without changes