dmart-0.1.9-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alembic/__init__.py +0 -0
- alembic/env.py +91 -0
- alembic/scripts/__init__.py +0 -0
- alembic/scripts/calculate_checksums.py +77 -0
- alembic/scripts/migration_f7a4949eed19.py +28 -0
- alembic/versions/0f3d2b1a7c21_add_authz_materialized_views.py +87 -0
- alembic/versions/10d2041b94d4_last_checksum_history.py +62 -0
- alembic/versions/1cf4e1ee3cb8_ext_permission_with_filter_fields_values.py +33 -0
- alembic/versions/26bfe19b49d4_rm_failedloginattempts.py +42 -0
- alembic/versions/3c8bca2219cc_add_otp_table.py +38 -0
- alembic/versions/6675fd9dfe42_remove_unique_from_sessions_table.py +36 -0
- alembic/versions/71bc1df82e6a_adding_user_last_login_at.py +43 -0
- alembic/versions/74288ccbd3b5_initial.py +264 -0
- alembic/versions/7520a89a8467_rm_activesession_table.py +39 -0
- alembic/versions/848b623755a4_make_created_nd_updated_at_required.py +138 -0
- alembic/versions/8640dcbebf85_add_notes_to_users.py +32 -0
- alembic/versions/91c94250232a_adding_fk_on_owner_shortname.py +104 -0
- alembic/versions/98ecd6f56f9a_ext_meta_with_owner_group_shortname.py +66 -0
- alembic/versions/9aae9138c4ef_indexing_created_at_updated_at.py +80 -0
- alembic/versions/__init__.py +0 -0
- alembic/versions/b53f916b3f6d_json_to_jsonb.py +492 -0
- alembic/versions/eb5f1ec65156_adding_user_locked_to_device.py +36 -0
- alembic/versions/f7a4949eed19_adding_query_policies_to_meta.py +60 -0
- api/__init__.py +0 -0
- api/info/__init__.py +0 -0
- api/info/router.py +109 -0
- api/managed/__init__.py +0 -0
- api/managed/router.py +1541 -0
- api/managed/utils.py +1850 -0
- api/public/__init__.py +0 -0
- api/public/router.py +758 -0
- api/qr/__init__.py +0 -0
- api/qr/router.py +108 -0
- api/user/__init__.py +0 -0
- api/user/model/__init__.py +0 -0
- api/user/model/errors.py +14 -0
- api/user/model/requests.py +165 -0
- api/user/model/responses.py +11 -0
- api/user/router.py +1401 -0
- api/user/service.py +270 -0
- bundler.py +44 -0
- config/__init__.py +0 -0
- config/channels.json +11 -0
- config/notification.json +17 -0
- data_adapters/__init__.py +0 -0
- data_adapters/adapter.py +16 -0
- data_adapters/base_data_adapter.py +467 -0
- data_adapters/file/__init__.py +0 -0
- data_adapters/file/adapter.py +2043 -0
- data_adapters/file/adapter_helpers.py +1013 -0
- data_adapters/file/archive.py +150 -0
- data_adapters/file/create_index.py +331 -0
- data_adapters/file/create_users_folders.py +52 -0
- data_adapters/file/custom_validations.py +68 -0
- data_adapters/file/drop_index.py +40 -0
- data_adapters/file/health_check.py +560 -0
- data_adapters/file/redis_services.py +1110 -0
- data_adapters/helpers.py +27 -0
- data_adapters/sql/__init__.py +0 -0
- data_adapters/sql/adapter.py +3210 -0
- data_adapters/sql/adapter_helpers.py +491 -0
- data_adapters/sql/create_tables.py +451 -0
- data_adapters/sql/create_users_folders.py +53 -0
- data_adapters/sql/db_to_json_migration.py +482 -0
- data_adapters/sql/health_check_sql.py +232 -0
- data_adapters/sql/json_to_db_migration.py +454 -0
- data_adapters/sql/update_query_policies.py +101 -0
- data_generator.py +81 -0
- dmart-0.1.9.dist-info/METADATA +64 -0
- dmart-0.1.9.dist-info/RECORD +149 -0
- dmart-0.1.9.dist-info/WHEEL +5 -0
- dmart-0.1.9.dist-info/entry_points.txt +2 -0
- dmart-0.1.9.dist-info/top_level.txt +23 -0
- dmart.py +513 -0
- get_settings.py +7 -0
- languages/__init__.py +0 -0
- languages/arabic.json +15 -0
- languages/english.json +16 -0
- languages/kurdish.json +14 -0
- languages/loader.py +13 -0
- main.py +506 -0
- migrate.py +24 -0
- models/__init__.py +0 -0
- models/api.py +203 -0
- models/core.py +597 -0
- models/enums.py +255 -0
- password_gen.py +8 -0
- plugins/__init__.py +0 -0
- plugins/action_log/__init__.py +0 -0
- plugins/action_log/plugin.py +121 -0
- plugins/admin_notification_sender/__init__.py +0 -0
- plugins/admin_notification_sender/plugin.py +124 -0
- plugins/ldap_manager/__init__.py +0 -0
- plugins/ldap_manager/plugin.py +100 -0
- plugins/local_notification/__init__.py +0 -0
- plugins/local_notification/plugin.py +123 -0
- plugins/realtime_updates_notifier/__init__.py +0 -0
- plugins/realtime_updates_notifier/plugin.py +58 -0
- plugins/redis_db_update/__init__.py +0 -0
- plugins/redis_db_update/plugin.py +188 -0
- plugins/resource_folders_creation/__init__.py +0 -0
- plugins/resource_folders_creation/plugin.py +81 -0
- plugins/system_notification_sender/__init__.py +0 -0
- plugins/system_notification_sender/plugin.py +188 -0
- plugins/update_access_controls/__init__.py +0 -0
- plugins/update_access_controls/plugin.py +9 -0
- pytests/__init__.py +0 -0
- pytests/api_user_models_erros_test.py +16 -0
- pytests/api_user_models_requests_test.py +98 -0
- pytests/archive_test.py +72 -0
- pytests/base_test.py +300 -0
- pytests/get_settings_test.py +14 -0
- pytests/json_to_db_migration_test.py +237 -0
- pytests/service_test.py +26 -0
- pytests/test_info.py +55 -0
- pytests/test_status.py +15 -0
- run_notification_campaign.py +98 -0
- scheduled_notification_handler.py +121 -0
- schema_migration.py +208 -0
- schema_modulate.py +192 -0
- set_admin_passwd.py +55 -0
- sync.py +202 -0
- utils/__init__.py +0 -0
- utils/access_control.py +306 -0
- utils/async_request.py +8 -0
- utils/exporter.py +309 -0
- utils/firebase_notifier.py +57 -0
- utils/generate_email.py +38 -0
- utils/helpers.py +352 -0
- utils/hypercorn_config.py +12 -0
- utils/internal_error_code.py +60 -0
- utils/jwt.py +124 -0
- utils/logger.py +167 -0
- utils/middleware.py +99 -0
- utils/notification.py +75 -0
- utils/password_hashing.py +16 -0
- utils/plugin_manager.py +215 -0
- utils/query_policies_helper.py +112 -0
- utils/regex.py +44 -0
- utils/repository.py +529 -0
- utils/router_helper.py +19 -0
- utils/settings.py +165 -0
- utils/sms_notifier.py +21 -0
- utils/social_sso.py +67 -0
- utils/templates/activation.html.j2 +26 -0
- utils/templates/reminder.html.j2 +17 -0
- utils/ticket_sys_utils.py +203 -0
- utils/web_notifier.py +29 -0
- websocket.py +231 -0
@@ -0,0 +1,232 @@
#!/usr/bin/env -S BACKEND_ENV=config.env python3

import argparse
import asyncio

from jsonschema import ValidationError
from jsonschema.validators import Draft7Validator
import json
import time
from sqlmodel import select, col, delete
from adapter import SQLAdapter
from data_adapters.adapter import data_adapter as db
from data_adapters.sql.create_tables import Entries, Spaces
from typing import Any

from models import core, api
from models.enums import ContentType, RequestType, ResourceType
from api.managed.router import serve_request


duplicated_entries : dict= {}

key_entries: dict = {}
MAX_INVALID_SIZE = 100

# {"space_name": {"schema_name": SCHEMA_DATA_DICT}}
spaces_schemas: dict[str, dict[str, dict]] = {}


async def main(health_type: str, space_param: str, schemas_param: list):
    async with SQLAdapter().get_session() as session:
        session.execute(
            delete(Entries).where(col(Entries.subpath) == "/health_check")
        )
        await session.commit()

    health_type = "hard" if health_type is None else health_type
    space_param = "all" if space_param is None else space_param

    if health_type not in ["soft", "hard"]:
        print("Wrong mode specify [soft or hard]")
        return

    spaces = await db.get_spaces()
    spaces_names : list = []

    if space_param != "all":
        if space_param not in spaces.keys():
            print(f"space name {space_param} is not found")
            return
        spaces_names = [spaces[space_param]]
    else:
        spaces_names = list(spaces.keys())

    if health_type == "soft":
        pass
    elif health_type == "hard":
        for space in spaces_names:
            await hard_space_check(space)


async def hard_space_check(space):
    async with SQLAdapter().get_session() as session:
        sql_stm = select(Entries).where(col(Entries.space_name) == space)
        _result = session.exec(sql_stm).all()
        _result = [r[0] for r in _result]
        entries = list(_result)
        folders_report: dict[str, dict[str, Any]] = {}

        _sql_stm = select(Spaces).where(col(Spaces.shortname) == space)
        target_space: Spaces | None = session.exec(_sql_stm).first()
        if target_space:
            schema_data_space: Entries | None = session.exec(
                select(Entries)
                .where(Entries.shortname == 'metafile')
                .where(Entries.subpath == "/schema")
            ).first()
            if "/" not in folders_report:
                folders_report["/" ] = {
                    "valid_entries": 0,
                }

            if schema_data_space and schema_data_space.payload:
                try:
                    if isinstance(schema_data_space.payload, dict):
                        Draft7Validator(
                            schema_data_space.payload["body"]
                        ).validate(
                            json.loads(target_space.model_dump_json())
                        )
                        folders_report['/']["valid_entries"] += 1
                except ValidationError as e:
                    issue = {
                        "issues": ["payload"],
                        "uuid": str(target_space.uuid),
                        "shortname": target_space.shortname,
                        "resource_type": 'space',
                        "exception": str(e),
                    }
                    if folders_report['/'].get("invalid_entries", None) is None:
                        folders_report['/']["invalid_entries"] = []
                    folders_report['/']["invalid_entries"] = [
                        *folders_report['/']["invalid_entries"],
                        issue
                    ]

        for entry in entries:
            subpath = entry.subpath[1:]
            if subpath == "":
                subpath = "/"

            payload : core.Payload
            if entry.payload and isinstance(entry.payload, dict):
                try:
                    payload = core.Payload.model_validate(entry.payload)
                except Exception as e:
                    issue = {
                        "issues": ["payload"],
                        "uuid": str(entry.uuid),
                        "shortname": entry.shortname,
                        "resource_type": 'space',
                        "exception": str(e),
                    }
                    if folders_report['/'].get("invalid_entries", None) is None:
                        folders_report['/']["invalid_entries"] = []
                    folders_report['/']["invalid_entries"] = [
                        *folders_report['/']["invalid_entries"],
                        issue
                    ]
                    continue
            elif isinstance(entry.payload, core.Payload):
                payload = entry.payload
            else:
                continue

            if not payload.schema_shortname:
                continue

            body = payload.body
            schema_data = session.exec(
                select(Entries)
                .where(Entries.shortname == payload.schema_shortname)
                .where(Entries.subpath == "/schema")
            ).first()

            if not schema_data:
                continue

            schema_payload : core.Payload
            if schema_data.payload and isinstance(schema_data.payload, dict):
                schema_payload = core.Payload.model_validate(schema_data.payload)
            elif schema_data.payload and isinstance(schema_data.payload, core.Payload):
                schema_payload = schema_data.payload
            else:
                continue

            if not schema_payload.body:
                continue
            schema_body = schema_payload.body
            if isinstance(schema_body, str):
                continue

            if subpath not in folders_report:
                folders_report[subpath] = {
                    "valid_entries": 0,
                }

            try:
                Draft7Validator(
                    schema_body
                ).validate(body)
                folders_report[subpath]["valid_entries"] += 1
            except ValidationError as e:
                issue = {
                    "issues": ["payload"],
                    "uuid": str(entry.uuid),
                    "shortname": entry.shortname,
                    "resource_type": entry.resource_type,
                    "exception": str(e),
                }
                if folders_report[subpath].get("invalid_entries", None) is None:
                    folders_report[subpath]["invalid_entries"] = []
                folders_report[subpath]["invalid_entries"] = [
                    *folders_report[subpath]["invalid_entries"],
                    issue
                ]

        await save_health_check_entry(
            {"folders_report": folders_report}, space
        )


async def save_health_check_entry(health_check, space_name: str):
    try:
        await serve_request(
            request=api.Request(
                space_name="management",
                request_type=RequestType.create,
                records=[
                    core.Record(
                        resource_type=ResourceType.content,
                        shortname=space_name,
                        subpath="/health_check",
                        attributes={
                            "is_active": True,
                            "payload": {
                                "schema_shortname": "health_check",
                                "content_type": ContentType.json,
                                "body": health_check
                            }
                        },
                    )
                ],
            ),
            owner_shortname='dmart',
        )
    except Exception as e:
        print(e)

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="This created for doing health check functionality",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument("-t", "--type", help="type of health check (soft or hard)")
    parser.add_argument("-s", "--space", help="hit the target space or pass (all) to make the full health check")
    parser.add_argument("-m", "--schemas", nargs="*", help="hit the target schema inside the space")

    args = parser.parse_args()
    before_time = time.time()
    asyncio.run(main(args.type, args.space or "all", args.schemas))
    print(f'total time: {"{:.2f}".format(time.time() - before_time)} sec')
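Note: the 232-line hunk above matches data_adapters/sql/health_check_sql.py in the file listing. Its core "hard" check is plain jsonschema validation: each entry's payload body is validated against the schema entry stored under /schema, and failures are collected into a folders_report dict keyed by subpath. The following is a minimal, self-contained sketch of that pattern only; the schema, entry, and subpath are invented examples, not taken from the package.

# Illustrative sketch (not part of the wheel): validate one entry body against a
# JSON Schema with Draft7Validator and record the outcome folders_report-style.
from jsonschema import ValidationError
from jsonschema.validators import Draft7Validator

schema_body = {
    "type": "object",
    "properties": {"title": {"type": "string"}},
    "required": ["title"],
}
entry_body = {"title": 123}  # deliberately invalid

folders_report: dict[str, dict] = {"/posts": {"valid_entries": 0}}
try:
    Draft7Validator(schema_body).validate(entry_body)
    folders_report["/posts"]["valid_entries"] += 1
except ValidationError as e:
    # Failures are appended as "issue" dicts under the entry's subpath.
    folders_report["/posts"].setdefault("invalid_entries", []).append(
        {"issues": ["payload"], "shortname": "example_entry", "exception": str(e)}
    )

print(folders_report)

Judging by the argparse block, the script itself is presumably run directly, along the lines of BACKEND_ENV=config.env python health_check_sql.py -t hard -s all, with -s naming a single space or "all".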
@@ -0,0 +1,454 @@
from datetime import datetime
from data_adapters.sql.adapter import SQLAdapter
from utils.settings import settings
import asyncio
import hashlib
import json
import os
import sys
from pathlib import Path
from typing import Any
from uuid import uuid4
from utils.query_policies_helper import generate_query_policies
from models.enums import ResourceType, ContentType
from data_adapters.sql.create_tables import Entries, Users, Attachments, Roles, Permissions, Spaces, generate_tables, \
    Histories

async def save_health_check_entry():
    health_check_entry = {
        "space_name": "management",
        "resource_type": "content",
        "shortname": "migration_json_to_db",
        "subpath": "/health_check",
        "is_active": True,
        "payload": {
            "schema_shortname": "health_check",
            "content_type": ContentType.json,
            "body": {"folders_report": folders_report}
        }
    }

    try:
        async with SQLAdapter().get_session() as session:
            session.add(Spaces.model_validate(health_check_entry))
            await session.commit()
    except Exception as e:
        print(e)

def subpath_checker(subpath: str):
    if subpath.endswith("/"):
        subpath = subpath[:-1]
    if not subpath.startswith("/"):
        subpath = '/' + subpath
    return subpath

folders_report: Any = {}
invalid_entries: Any = []

def save_issue(resource_type, entry, e):
    entry_uuid = None
    entry_shortname = None
    if isinstance(entry, dict):
        entry_uuid = str(entry["uuid"])
        entry_shortname = entry["shortname"]
    else:
        entry_uuid = str(entry.uuid)
        entry_shortname = entry.shortname

    return {
        "issues": ["entry"],
        "uuid": entry_uuid,
        "shortname": entry_shortname,
        "resource_type": resource_type,
        "exception": str(e),
    }

def save_report(isubpath: str, issue):
    if folders_report.get(isubpath, False):
        if folders_report[isubpath].get("invalid_entries", False):
            folders_report[isubpath]["invalid_entries"] = [
                *folders_report[isubpath]["invalid_entries"],
                issue
            ]
        else:
            folders_report[isubpath]["invalid_entries"] = [
                issue
            ]
    else:
        folders_report[isubpath] = {
            "invalid_entries": [
                issue
            ]
        }

async def bulk_insert_in_batches(model, records, batch_size=2000):
    async with SQLAdapter().get_session() as session:
        try:
            for i in range(0, len(records), batch_size):
                batch = []
                try:
                    batch = records[i:i + batch_size]
                    for record in batch:
                        if isinstance(record.get('created_at'), str):
                            record['created_at'] = datetime.fromisoformat(record['created_at'])
                        if isinstance(record.get('updated_at'), str):
                            record['updated_at'] = datetime.fromisoformat(record['updated_at'])
                    await session.run_sync(lambda ses: ses.bulk_insert_mappings(model, batch))
                    await session.commit()
                except Exception as e:
                    print("[!bulk_insert_in_batches]", e)
                    await session.rollback()
                    for _batch in batch:
                        try:
                            session.add(model.model_validate(_batch))
                            await session.commit()
                        except Exception as e:
                            await session.rollback()
                            print(
                                "[!bulk_insert_in_batches_single]",
                                e,
                                f"* {_batch['subpath']}/{_batch['shortname']}"
                            )
                            save_report('/', save_issue(_batch['resource_type'], _batch, e))
        except Exception as e:
            print("[!fatal_bulk_insert_in_batches]", e)


async def _process_directory(root, dirs, space_name, subpath):
    # asyncio.run()
    await process_directory(root, dirs, space_name, subpath)

async def process_directory(root, dirs, space_name, subpath):
    histories = []
    attachments = []
    entries = []
    users = []
    roles = []
    permissions = []

    for dir in dirs:
        for file in os.listdir(os.path.join(root, dir)):
            if not file.startswith('meta'):
                if file == 'history.jsonl':
                    lines = open(os.path.join(root, dir, file), 'r').readlines()
                    for line in lines:
                        history = None
                        try:
                            history = json.loads(line.replace('\n', ''))
                            history['shortname'] = dir
                            history['space_name'] = space_name
                            history['subpath'] = subpath_checker(subpath)
                            history['timestamp'] = datetime.strptime(history['timestamp'], '%Y-%m-%dT%H:%M:%S.%f')

                            histories.append(history)

                        except Exception as e:
                            print(f"Error processing Histories {space_name}/{subpath}/{dir}/{history} ... ")
                            print(e)

            p = os.path.join(root, dir, file)
            if Path(p).is_file():
                if 'attachments' in p:
                    if file.startswith("meta") and file.endswith(".json"):
                        _attachment = json.load(open(os.path.join(root, dir, file)))
                        _attachment['space_name'] = space_name
                        _attachment['uuid'] = _attachment.get('uuid', uuid4())
                        _attachment['subpath'] = subpath.replace('//', '/')
                        _attachment['subpath'] = subpath_checker(_attachment['subpath'])
                        _attachment['acl'] = _attachment.get('acl', [])
                        _attachment['relationships'] = _attachment.get('relationships', [])
                        _attachment['tags'] = _attachment.get('tags', [])
                        _attachment['owner_shortname'] = _attachment.get('owner_shortname', '')
                        _attachment['media'] = None
                        if file.replace("attachments.", "") == 'comment':
                            _attachment['payload'] = {
                                'body': _attachment.get('body', ''),
                                'state': _attachment.get('state', '')
                            }
                        elif file.replace("attachments.", "") == 'ticket':
                            _attachment['payload'] = {
                                'state': _attachment.get('state', ''),
                                'is_open': _attachment.get('is_open', True),
                                'reporter': _attachment.get('reporter', ''),
                                'workflow_shortname': _attachment.get('workflow_shortname', ''),
                                'collaborators': _attachment.get('collaborators', {})
                            }
                        else:
                            _body: str = _attachment.get('payload', {}).get('body', None)
                            if _body and _body.endswith('.json'):
                                _attachment_body = json.load(open(os.path.join(root, dir, _body)))
                                _attachment['payload']['body'] = _attachment_body
                            elif _body:
                                if not _attachment.get('payload', {}).get('content_type', False):
                                    _attachment['media'] = None
                                else:
                                    try:
                                        _attachment['media'] = open(os.path.join(root, dir, _body), 'rb').read()
                                    except Exception as e:
                                        print(f"Error reading media file {os.path.join(root, dir, _body)}: {e}")
                                        _attachment['media'] = None
                        if _attachment.get('payload', None) is None:
                            _attachment['payload'] = {}
                        try:
                            _attachment['resource_type'] = dir.replace('attachments.', '')
                            attachments.append(_attachment)
                        except Exception as e:
                            print(f"Error processing Attachments {space_name}/{subpath}/{dir}/{file} ... ")
                            print("!!", e)
                            save_report('/', save_issue(_attachment['resource_type'], _attachment, e))
                elif file.startswith('meta.') and file.endswith('.json'):
                    entry = json.load(open(p))
                    entry['space_name'] = space_name
                    body = None
                    _payload = entry.get('payload', {})
                    if _payload:
                        if payload := entry.get('payload', {}).get('body', None):
                            if entry.get('payload', {}).get('content_type', None) == 'json':
                                try:
                                    body = json.load(open(
                                        os.path.join(root, dir, '../..', payload)
                                    ))
                                except Exception as e:
                                    save_report('/', save_issue(ResourceType.json, entry, e))
                            else:
                                body = payload

                            sha1 = hashlib.sha1()
                            sha1.update(json.dumps(body).encode())
                            checksum = sha1.hexdigest()
                            entry['payload']['checksum'] = checksum
                            entry['payload']['body'] = body
                    else:
                        entry['payload'] = None
                    entry['subpath'] = subpath_checker(subpath)
                    entry['acl'] = entry.get('acl', [])
                    entry['relationships'] = entry.get('relationships', [])
                    try:
                        if file.startswith("meta.user"):
                            entry['query_policies'] = generate_query_policies(
                                space_name=space_name,
                                subpath=subpath,
                                resource_type=ResourceType.user,
                                is_active=True,
                                owner_shortname=entry.get('owner_shortname', 'dmart'),
                                owner_group_shortname=entry.get('owner_group_shortname', None),
                            )
                            entry['resource_type'] = 'user'
                            entry['firebase_token'] = entry.get('firebase_token', '')
                            entry['type'] = entry.get('type', 'web')
                            entry['language'] = entry.get('language', '')
                            entry['google_id'] = entry.get('google_id', '')
                            entry['facebook_id'] = entry.get('facebook_id', '')
                            entry['social_avatar_url'] = entry.get('social_avatar_url', '')
                            entry['displayname'] = entry.get('displayname', {})
                            entry['description'] = entry.get('description', {})
                            users.append(entry)
                        elif file.startswith("meta.role"):
                            entry['query_policies'] = generate_query_policies(
                                space_name=space_name,
                                subpath=subpath,
                                resource_type=ResourceType.role,
                                is_active=True,
                                owner_shortname=entry.get('owner_shortname', 'dmart'),
                                owner_group_shortname=entry.get('owner_group_shortname', None),
                            )
                            entry['resource_type'] = 'role'
                            entry['permissions'] = entry.get('permissions', [])
                            roles.append(entry)
                        elif file.startswith("meta.permission"):
                            entry['query_policies'] = generate_query_policies(
                                space_name=space_name,
                                subpath=subpath,
                                resource_type=ResourceType.permission,
                                is_active=True,
                                owner_shortname=entry.get('owner_shortname', 'dmart'),
                                owner_group_shortname=entry.get('owner_group_shortname', None),
                            )
                            entry['resource_type'] = 'permission'
                            entry['subpaths'] = entry.get('subpaths', {})
                            entry['resource_types'] = entry.get('resource_types', [])
                            entry['actions'] = entry.get('actions', [])
                            entry['conditions'] = entry.get('conditions', [])
                            entry['restricted_fields'] = entry.get('restricted_fields', [])
                            entry['allowed_fields_values'] = entry.get('allowed_fields_values', {})
                            permissions.append(entry)
                        else:
                            entry['resource_type'] = file.replace('.json', '').replace('meta.', '')

                            entry['query_policies'] = generate_query_policies(
                                space_name=space_name,
                                subpath=subpath,
                                resource_type=entry['resource_type'],
                                is_active=True,
                                owner_shortname=entry.get('owner_shortname', 'dmart'),
                                owner_group_shortname=entry.get('owner_group_shortname', None),
                            )

                            if entry['resource_type'] == 'folder':
                                new_subpath = entry['subpath'].split('/')
                                entry['subpath'] = '/'.join(new_subpath[:-1]) + '/'
                            elif entry['resource_type'] == 'ticket':
                                entry["state"] = entry.get("state", "")
                                entry["is_open"] = entry.get("is_open", True)
                                entry["reporter"] = entry.get("reporter", None)
                                entry["workflow_shortname"] = entry.get("workflow_shortname", "")
                                entry["collaborators"] = entry.get("collaborators", None)
                                entry["resolution_reason"] = entry.get("resolution_reason", None)
                            entry['displayname'] = entry.get('displayname', {})
                            entry['description'] = entry.get('description', {})
                            entry["subpath"] = subpath_checker(entry["subpath"])
                            entries.append(entry)
                            continue
                        entry["subpath"] = subpath_checker(entry["subpath"])

                        entries.append(entry)
                    except Exception as e:
                        save_report('/', save_issue(entry['resource_type'], entry, e))

    await bulk_insert_in_batches(Users, users)
    await bulk_insert_in_batches(Roles, roles)
    await bulk_insert_in_batches(Permissions, permissions)
    await bulk_insert_in_batches(Entries, entries)
    await bulk_insert_in_batches(Attachments, attachments)
    await bulk_insert_in_batches(Histories, histories)


async def main():
    generate_tables()

    target_path = settings.spaces_folder

    if len(sys.argv) == 2 and sys.argv[1] != 'json_to_db':
        target_path = target_path.joinpath(sys.argv[1])

    if not target_path.exists():
        print(f"Space '{str(target_path).replace('/', '')}' does not exist")
        sys.exit(1)

    all_dirs = []
    user_dirs = []
    for root, dirs, _ in os.walk(str(target_path)):
        if root.startswith(os.path.join(str(target_path), 'management/users')):
            user_dirs.append((root, sorted(dirs, key=lambda d: d != 'dmart')))
        else:
            all_dirs.append((root, dirs))

    user_dirs.sort(key=lambda x: (
        not x[0].startswith(os.path.join(str(target_path), 'management/users/.dm')),
        x[0]
    ))


    for root, dirs in user_dirs:
        tmp = root.replace(str(settings.spaces_folder), '')
        if tmp == '':
            continue
        if tmp[0] == '/':
            tmp = tmp[1:]
        space_name = tmp.split('/')[0]
        subpath = '/'.join(tmp.split('/')[1:])
        if space_name == '..':
            continue

        if space_name.startswith('.git'):
            continue

        subpath = subpath.replace('.dm', '')
        if subpath != '/' and subpath.endswith('/'):
            subpath = subpath[:-1]

        if subpath == '':
            subpath = '/'

        await process_directory(root, dirs, space_name, subpath)

    # with ThreadPoolExecutor() as executor:
    #     futures = []
    for root, dirs in all_dirs:
        tmp = root.replace(str(settings.spaces_folder), '')
        if tmp == '':
            continue
        if tmp[0] == '/':
            tmp = tmp[1:]
        space_name = tmp.split('/')[0]
        subpath = '/'.join(tmp.split('/')[1:])
        if space_name == '..':
            continue

        if space_name.startswith('.git'):
            continue

        print(".", end='')
        if subpath == '' or subpath == '/':
            subpath = '/'
            p = os.path.join(root, '.dm', 'meta.space.json')
            entry = {}
            if Path(p).is_file():
                try:
                    entry = json.load(open(p))
                    entry['space_name'] = space_name
                    entry['shortname'] = space_name
                    entry['query_policies'] = generate_query_policies(
                        space_name=space_name,
                        subpath=subpath,
                        resource_type=ResourceType.space,
                        is_active=True,
                        owner_shortname=entry.get('owner_shortname', 'dmart'),
                        owner_group_shortname=entry.get('owner_group_shortname', None),
                    )

                    _payload = entry.get('payload', {})
                    if _payload:
                        if payload := _payload.get('body', None):
                            if entry.get('payload', {}).get('content_type', None) == 'json':
                                body = json.load(open(
                                    os.path.join(root, '.dm', '../..', str(payload))
                                ))
                            else:
                                body = payload
                            sha1 = hashlib.sha1()
                            sha1.update(json.dumps(body).encode())
                            checksum = sha1.hexdigest()
                            entry['payload']['checksum'] = checksum
                            entry['payload']['body'] = body
                    else:
                        entry['payload'] = None
                    entry['subpath'] = '/'
                    entry['resource_type'] = 'space'
                    entry['tags'] = entry.get('tags', [])
                    entry['acl'] = entry.get('acl', [])
                    entry['hide_folders'] = entry.get('hide_folders', [])
                    entry['relationships'] = entry.get('relationships', [])
                    entry['hide_space'] = entry.get('hide_space', False)

                    async with SQLAdapter().get_session() as session:
                        session.add(Spaces.model_validate(entry))
                        await session.commit()
                except Exception as e:
                    save_report('/', save_issue(ResourceType.space, entry, e))
            continue

        subpath = subpath.replace('.dm', '')
        if subpath != '/' and subpath.endswith('/'):
            subpath = subpath[:-1]

        if subpath == '':
            subpath = '/'

        await _process_directory(root, dirs, space_name, subpath)
        # futures.append(executor.submit(_process_directory, root, dirs, space_name, subpath))
    # as_completed(futures)

    # for future in as_completed(futures):
    #     future.result()

    if settings.active_data_db == 'file':
        print("[Warning] you are using active_data_db='file', please don't forget to set it to active_data_db='sql' in your config.env")

    await save_health_check_entry()

    # await SQLAdapter().ensure_authz_materialized_views_fresh()


if __name__ == "__main__":
    asyncio.run(main())
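Note: the 454-line hunk matches data_adapters/sql/json_to_db_migration.py in the file listing. It walks settings.spaces_folder, turns each meta.*.json file into a row dict (normalizing the subpath and attaching a sha1 checksum of the JSON body), and bulk-inserts the results per table, falling back to row-by-row inserts when a batch fails. The sketch below isolates just the normalization and checksum steps; the example values are invented, and subpath_checker is copied from the hunk above for self-containment.

# Illustrative sketch (not part of the wheel): per-entry subpath normalization
# and body checksum as performed by the migration before insertion.
import hashlib
import json


def subpath_checker(subpath: str) -> str:
    # Strip a trailing slash, then force a single leading slash.
    if subpath.endswith("/"):
        subpath = subpath[:-1]
    if not subpath.startswith("/"):
        subpath = "/" + subpath
    return subpath


body = {"title": "hello"}  # stands in for the entry's JSON payload body
checksum = hashlib.sha1(json.dumps(body).encode()).hexdigest()

print(subpath_checker("posts/2024/"))  # -> /posts/2024
print(checksum)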