valar 1.0.21__tar.gz → 1.0.23__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {valar-1.0.21/src/valar.egg-info → valar-1.0.23}/PKG-INFO +1 -1
- {valar-1.0.21 → valar-1.0.23}/setup.py +1 -4
- valar-1.0.23/src/valar/channels/__init__.py +1 -0
- valar-1.0.23/src/valar/channels/consumer.py +48 -0
- valar-1.0.23/src/valar/channels/executer.py +13 -0
- valar-1.0.23/src/valar/channels/mapping.py +21 -0
- valar-1.0.23/src/valar/channels/sender.py +60 -0
- valar-1.0.23/src/valar/channels/views.py +12 -0
- valar-1.0.23/src/valar/core/counter.py +9 -0
- valar-1.0.23/src/valar/core/dao/__init__.py +0 -0
- valar-1.0.23/src/valar/core/dao/_mon_array2tree.py +18 -0
- valar-1.0.23/src/valar/core/dao/dao_base.py +50 -0
- valar-1.0.23/src/valar/core/dao/dao_mon.py +76 -0
- valar-1.0.23/src/valar/core/dao/dao_orm.py +96 -0
- valar-1.0.23/src/valar/core/dao/engine.py +12 -0
- valar-1.0.23/src/valar/core/dao/engine_minio.py +90 -0
- valar-1.0.23/src/valar/core/dao/engine_mon.py +34 -0
- valar-1.0.23/src/valar/core/dao/engine_orm.py +25 -0
- valar-1.0.23/src/valar/core/dao/model_mon.py +24 -0
- valar-1.0.23/src/valar/core/dao/model_orm.py +192 -0
- valar-1.0.23/src/valar/core/dao/query_mon.py +12 -0
- valar-1.0.23/src/valar/core/dao/query_orm.py +43 -0
- valar-1.0.23/src/valar/core/dao/utils_orm.py +85 -0
- valar-1.0.23/src/valar/core/dao_abstract.py +63 -0
- valar-1.0.23/src/valar/core/meta/__init__.py +0 -0
- valar-1.0.23/src/valar/core/meta/defaults/__init__.py +0 -0
- valar-1.0.23/src/valar/core/meta/defaults/field_keys_default.py +17 -0
- valar-1.0.23/src/valar/core/meta/defaults/field_values_default.py +85 -0
- valar-1.0.23/src/valar/core/meta/defaults/frame_defaults.py +136 -0
- valar-1.0.23/src/valar/core/meta/defaults/view_defaults.py +7 -0
- valar-1.0.23/src/valar/core/meta/field_orm.py +144 -0
- valar-1.0.23/src/valar/core/meta/init_meta_frame.py +30 -0
- valar-1.0.23/src/valar/core/meta/meta_orm.py +69 -0
- valar-1.0.21/src/valar/__init__.py → valar-1.0.23/src/valar/core/middleware.py +2 -8
- valar-1.0.23/src/valar/core/response.py +7 -0
- valar-1.0.23/src/valar/core/singleton_meta.py +6 -0
- valar-1.0.23/src/valar/core/valar_models.py +82 -0
- valar-1.0.23/src/valar/data/__init__.py +0 -0
- valar-1.0.23/src/valar/data/migrations/0001_initial.py +141 -0
- valar-1.0.23/src/valar/data/migrations/__init__.py +0 -0
- {valar-1.0.21 → valar-1.0.23}/src/valar/data/models.py +2 -121
- valar-1.0.23/src/valar/data/urls.py +18 -0
- valar-1.0.23/src/valar/data/views/__init__.py +0 -0
- valar-1.0.23/src/valar/data/views/handler.py +41 -0
- valar-1.0.23/src/valar/data/views/rest.py +86 -0
- {valar-1.0.21 → valar-1.0.23/src/valar.egg-info}/PKG-INFO +1 -1
- valar-1.0.23/src/valar.egg-info/SOURCES.txt +53 -0
- valar-1.0.21/src/valar/channels/__init__.py +0 -107
- valar-1.0.21/src/valar/channels/utils.py +0 -43
- valar-1.0.21/src/valar/channels/views.py +0 -17
- valar-1.0.21/src/valar/data/file/__init__.py +0 -91
- valar-1.0.21/src/valar/data/handlers.py +0 -28
- valar-1.0.21/src/valar/data/mon/__init__.py +0 -123
- valar-1.0.21/src/valar/data/mon/query_translator.py +0 -91
- valar-1.0.21/src/valar/data/orm/__init__.py +0 -135
- valar-1.0.21/src/valar/data/orm/detacher.py +0 -61
- valar-1.0.21/src/valar/data/orm/meta.py +0 -99
- valar-1.0.21/src/valar/data/orm/meta_frame.py +0 -100
- valar-1.0.21/src/valar/data/orm/meta_loader.py +0 -200
- valar-1.0.21/src/valar/data/orm/values.py +0 -102
- valar-1.0.21/src/valar/data/query.py +0 -48
- valar-1.0.21/src/valar/data/urls.py +0 -24
- valar-1.0.21/src/valar/data/utils.py +0 -70
- valar-1.0.21/src/valar/data/views.py +0 -173
- valar-1.0.21/src/valar.egg-info/SOURCES.txt +0 -29
- {valar-1.0.21 → valar-1.0.23}/LICENSE +0 -0
- {valar-1.0.21 → valar-1.0.23}/README.md +0 -0
- {valar-1.0.21 → valar-1.0.23}/setup.cfg +0 -0
- {valar-1.0.21/src/valar/data → valar-1.0.23/src/valar}/__init__.py +0 -0
- {valar-1.0.21/src/valar/data/migrations → valar-1.0.23/src/valar/core}/__init__.py +0 -0
- {valar-1.0.21 → valar-1.0.23}/src/valar.egg-info/dependency_links.txt +0 -0
- {valar-1.0.21 → valar-1.0.23}/src/valar.egg-info/requires.txt +0 -0
- {valar-1.0.21 → valar-1.0.23}/src/valar.egg-info/top_level.txt +0 -0
{valar-1.0.21 → valar-1.0.23}/setup.py (+1 -4):

@@ -3,9 +3,6 @@ from setuptools import setup, find_packages
 with open("README.md", "r", encoding="utf-8") as f:
     long_description = f.read()
 
-# with open('requirements.txt', "r", encoding="utf-8") as f:
-#     required = f.read().splitlines()
-
 requires = [
     'channels==3.0.3',
     'pymongo~=4.11.2',
@@ -19,7 +16,7 @@ requires = [
 
 setup(
     name="valar",  # package name
-    version="1.0.21",  # version number
+    version="1.0.23",  # version number
     author="LYP",  # author
     author_email="liuyinpeng@buaa.edu.cn",  # email
     description="valar for morghulis",  # short description
valar-1.0.23/src/valar/channels/__init__.py (new file, +1 line):

VALAR_CHANNEL_GROUP = 'VALAR'
valar-1.0.23/src/valar/channels/consumer.py (new file, +48 lines):

from channels.generic.websocket import AsyncJsonWebsocketConsumer

from ..channels import VALAR_CHANNEL_GROUP

class ValarConsumer(AsyncJsonWebsocketConsumer):

    def __init__(self):
        self.client = None
        self.uid = None
        super().__init__()

    async def connect(self):
        params = self.scope['url_route']['kwargs']
        self.client = params.get('client')
        await self.channel_layer.group_add(VALAR_CHANNEL_GROUP, self.channel_name)
        await self.accept()

    async def disconnect(self, code):
        await self.channel_layer.group_discard(VALAR_CHANNEL_GROUP, self.channel_name)
        await self.close(code)

    async def receive_json(self, data, *args, **kwargs):
        pass

    async def user_emit(self, event):
        users: list = event.get('users', [])
        data = event.get('data', {})
        if self.uid in users:
            await self.send_json(data)

    async def client_emit(self, event):
        clients: list = event.get('clients', [])
        data = event.get('data', {})
        if self.client in clients:
            await self.send_json(data)

    async def broadcast_emit(self, event):
        data = event.get('data', {})
        await self.send_json(data)

    async def register_emit(self, event):
        users: list = event.get('users', [])
        clients: list = event.get('clients', [])
        if self.client in clients:
            self.uid = users[0]
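`ValarConsumer.connect()` reads a `client` keyword argument from `scope['url_route']['kwargs']`, so it has to be mounted on a websocket route that captures one; group messaging also assumes a `CHANNEL_LAYERS` backend is configured. A minimal routing sketch under those assumptions (the project name and the `ws/valar/<client>/` path are illustrative, not taken from the package):

```python
# asgi.py sketch — Channels 3.x style, matching the 'channels==3.0.3' pin in setup.py.
import os
from django.core.asgi import get_asgi_application
from django.urls import re_path
from channels.routing import ProtocolTypeRouter, URLRouter

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myproject.settings')  # hypothetical project
django_asgi_app = get_asgi_application()

from valar.channels.consumer import ValarConsumer  # imported after Django is set up

application = ProtocolTypeRouter({
    'http': django_asgi_app,
    'websocket': URLRouter([
        # The path is illustrative; ValarConsumer only needs the captured 'client' kwarg.
        re_path(r'^ws/valar/(?P<client>[^/]+)/$', ValarConsumer.as_asgi()),
    ]),
})
```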
valar-1.0.23/src/valar/channels/executer.py (new file, +13 lines):

import asyncio

async def execute_channel(method, sender):
    thread = asyncio.to_thread(__execute__, method, sender)
    asyncio.create_task(thread)


def __execute__(method, sender):
    sender.to_clients(None, [sender.client], 'start')
    method(sender)
    sender.to_clients(None, [sender.client], 'stop')
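`execute_channel` schedules the mapped handler on a worker thread, and `__execute__` brackets the call with 'start' and 'stop' messages to the requesting client, so a handler is just a blocking callable that accepts the `ValarSocketSender`. A hedged sketch of such a handler (the name and payload are made up):

```python
# Hypothetical handler: any blocking callable taking the sender works, because
# execute_channel runs it via asyncio.to_thread.
def export_report(sender):
    items = ['a', 'b', 'c']  # placeholder workload
    for index, item in enumerate(items, start=1):
        # wait=True rate-limits 'proceed' messages to roughly one per second
        sender.to_clients({'done': index, 'total': len(items)}, [sender.client], wait=True)
```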
valar-1.0.23/src/valar/channels/mapping.py (new file, +21 lines):

import importlib

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured

class ChannelMapping:
    def __init__(self):
        root = settings.ROOT_URLCONF
        module = importlib.import_module(root)
        name = 'channel_mapping'
        if hasattr(module, name):
            self.mapping: dict = getattr(module, name)
        else:
            raise ImproperlyConfigured("%r has no attribute %r" % (root, name))

    def get_handler(self, handler):
        method = self.mapping.get(handler)
        if method is None:
            raise ImproperlyConfigured("Cannot find handler - %r" % handler)
        return method
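`ChannelMapping` looks for a module-level dict named `channel_mapping` in the module pointed to by `settings.ROOT_URLCONF`, mapping handler names to callables. A sketch of that registration (the handler name and import are made up):

```python
# urls.py (the ROOT_URLCONF module) — sketch
from django.urls import path  # regular route imports kept for context
from myapp.tasks import export_report  # hypothetical import, see the handler sketch above

urlpatterns = [
    # ... the project's normal Django routes ...
]

# Keys are the 'handler' names passed to handel_channel; values are blocking
# callables that accept a ValarSocketSender.
channel_mapping = {
    'export_report': export_report,
}
```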
valar-1.0.23/src/valar/channels/sender.py (new file, +60 lines):

import json
import time
from datetime import datetime

from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from django.http import HttpRequest

from ..channels import VALAR_CHANNEL_GROUP


class ValarSocketSender:
    def __init__(self, request: HttpRequest, data=None):
        body = json.loads(request.body)
        auth = request.headers.get('AUTH')
        self.client = request.headers.get('CLIENT')
        self.uid = request.session.get('UID')
        self.handlerKey = body.get('handlerKey')
        self.channelKey = body.get('channelKey', 'default')
        self.data = data or body.get('data')
        self.send = get_channel_layer().group_send
        self.start_time = time.time()
        if auth and not self.uid:
            raise Exception('Unauthorized!')

    def __convert_body(self, emit, payload, status, clients=None, users=None):
        return {
            'type': emit,
            'data': {
                'status': status,
                'handlerKey': self.handlerKey,
                'channelKey': self.channelKey,
                'payload': payload,
                'timestamp': datetime.now().timestamp()
            },
            'clients': clients or [],
            'users': users or [],
        }

    def to_users(self, payload, users, status='proceed'):
        body = self.__convert_body(emit='user.emit', payload=payload, status=status, users=users)
        async_to_sync(self.send)(VALAR_CHANNEL_GROUP, body)

    def to_clients(self, payload, clients, status='proceed', wait=False):
        current_time = time.time()
        time_span = current_time - self.start_time
        if (wait and time_span > 1 and status == 'proceed') or not wait:
            body = self.__convert_body(emit='client.emit', payload=payload, status=status, clients=clients)
            async_to_sync(self.send)(VALAR_CHANNEL_GROUP, body)
            self.start_time = current_time

    def broadcast(self, payload, status):
        body = self.__convert_body(emit='broadcast.emit', payload=payload, status=status)
        async_to_sync(self.send)(VALAR_CHANNEL_GROUP, body)

    def register(self):
        body = self.__convert_body(emit='register.emit', payload=None, status=None, clients=[self.client], users=[self.uid])
        async_to_sync(self.send)(VALAR_CHANNEL_GROUP, body)
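For reference, `__convert_body` fixes the shape of every group message: the `type` key selects the consumer method (`client.emit`, `user.emit`, `broadcast.emit`, `register.emit`), and the nested `data` dict is what `send_json` ultimately delivers to matching sockets. A sketch of one `to_clients` event (values are illustrative):

```python
# Event published to the VALAR group by to_clients(...); ValarConsumer.client_emit
# forwards event['data'] to connections whose CLIENT id is listed in 'clients'.
event = {
    'type': 'client.emit',
    'data': {
        'status': 'proceed',                 # 'start' / 'proceed' / 'stop' in the executer flow
        'handlerKey': 'export_report',       # illustrative
        'channelKey': 'default',
        'payload': {'done': 1, 'total': 3},  # illustrative
        'timestamp': 1700000000.0,
    },
    'clients': ['browser-tab-42'],           # illustrative CLIENT header value
    'users': [],
}
```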
valar-1.0.23/src/valar/channels/views.py (new file, +12 lines):

from .executer import execute_channel
from .mapping import ChannelMapping
from .sender import ValarSocketSender
from ..core.response import ValarResponse


async def handel_channel(request, handler):
    sender = ValarSocketSender(request)
    method = ChannelMapping().get_handler(handler)
    await execute_channel(method, sender)
    return ValarResponse(True)
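The package's own URL wiring lives in valar-1.0.23/src/valar/data/urls.py (+18 lines, not shown in this excerpt). A minimal sketch of how `handel_channel` could be routed manually, with a hypothetical path:

```python
# urls.py sketch — the 'channel/<str:handler>' path is made up; the shipped
# valar/data/urls.py defines the package's actual routes.
from django.urls import path
from valar.channels.views import handel_channel

urlpatterns = [
    path('channel/<str:handler>', handel_channel),
]
```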
valar-1.0.23/src/valar/core/counter.py (new file, +9 lines):

class Counter:
    def __init__(self, array_or_int):
        self.length = array_or_int if isinstance(array_or_int, int) else len(array_or_int)
        self.index = 0

    def tick(self):
        self.index += 1
        percentage = round(self.index * 100 / self.length)
        return {"index": self.index, "length": self.length, "percentage": percentage}
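`Counter.tick()` returns a small progress dict that slots naturally into the sender payloads above. A sketch of a handler reporting progress (the workload is a placeholder; the import assumes the installed package is importable as `valar`):

```python
from valar.core.counter import Counter

def long_running(sender):            # hypothetical handler
    rows = range(250)                # placeholder workload
    counter = Counter(250)           # an int or any sized collection sets the length
    for _ in rows:
        progress = counter.tick()    # {'index': ..., 'length': 250, 'percentage': ...}
        sender.to_clients(progress, [sender.client], wait=True)
```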
valar-1.0.23/src/valar/core/dao/_mon_array2tree.py (new file, +18 lines):

def array2tree(data, mapping):
    mapping = mapping or {}
    lookup = {}
    for array in data:
        for i in range(len(array)):
            key = '/'.join(array[0:i + 1])
            item = mapping.get(key, {})
            value = item.get('value', array[i])
            label = item.get('label', value)
            display = item.get('display')
            item = lookup.get(key, {'value': value, 'label': label, 'display': display})
            if i < len(array) - 1:
                item['children'] = item.get('children', [])
            lookup[key] = item
            if i > 0:
                parent = '/'.join(array[0:i])
                lookup[parent]['children'].append(lookup[key])
    return [lookup[root] for root in [*set([array[0] for array in data])]]
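A quick worked example of `array2tree`, assuming the module is importable at the path shown in the listing; each input row is a root-to-leaf path, and lookup keys are built by joining the segments with '/':

```python
from valar.core.dao._mon_array2tree import array2tree

rows = [
    ['fruit', 'apple'],
    ['fruit', 'banana'],
    ['veg', 'carrot'],
]
tree = array2tree(rows, None)  # second argument is the optional label/display mapping
# Root order follows set() iteration and is not guaranteed; each root looks like:
# {'value': 'fruit', 'label': 'fruit', 'display': None,
#  'children': [{'value': 'apple', ...}, {'value': 'banana', ...}]}
```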
valar-1.0.23/src/valar/core/dao/dao_base.py (new file, +50 lines):

from .dao_mon import MonDao
from .dao_orm import OrmDao
from ..dao_abstract import AbstractDao


class Dao(AbstractDao):

    def __init__(self, entity, db='orm'):
        self.entity = entity
        self.db = db
        self.dao: AbstractDao = OrmDao(entity) if db == 'orm' else MonDao(entity)

    def get_model(self):
        self.dao.get_model()

    def save_one(self, item):
        return self.dao.save_one(item)

    def delete_one(self, _id):
        return self.dao.delete_one(_id)

    def find_one(self, _id):
        return self.dao.find_one(_id)

    def find(self, conditions=None, orders=None, size=0, page=1):
        return self.dao.find(conditions, orders, size, page)

    def update(self, template, conditions):
        return self.dao.update(template, conditions)

    def delete(self, conditions):
        return self.dao.delete(conditions)

    def transform(self, o, code=None):
        return self.dao.transform(o)

    def tree(self, root, conditions=None):
        return self.dao.tree(conditions, root)


    # def values(self, props, conditions, orders=None):
    #     pass
    #
    # def group(self, props, conditions, orders=None):
    #     pass
    #
    # def count(self, props, conditions):
    #     pass
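`Dao` is a thin facade that forwards every call to either `OrmDao` or `MonDao`. A hedged usage sketch; the entity key `'data.Document'` is made up, and real keys are the `'<app>.<ModelName>'` strings that `OrmEngine` (further down) builds for models subclassing `VModel`:

```python
from valar.core.dao.dao_base import Dao

dao = Dao('data.Document', db='orm')    # 'data.Document' is a placeholder entity key
bean = dao.save_one({'name': 'first'})  # no 'id' -> create; an 'id' field -> update
rows, total = dao.find(conditions=None, size=10, page=1)
items = dao.transform(rows)             # QuerySet (orm) / Cursor (mon) -> plain dicts
dao.delete_one(bean.id)                 # the orm backend returns a model instance
```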
valar-1.0.23/src/valar/core/dao/dao_mon.py (new file, +76 lines):

from pymongo.results import InsertOneResult
from pymongo.synchronous.cursor import Cursor

from .engine import DaoEngine
from .model_mon import MonModel
from .query_mon import MonQuery
from ..dao_abstract import AbstractDao


class MonDao(AbstractDao):
    def __init__(self, entity):
        self.entity = entity
        engine = DaoEngine().mon
        self.model: MonModel = engine.get_model(entity)
        self.objects = self.model.manager

    def get_model(self):
        return self.model

    def save_one(self, item):
        oid, item = self.model.detach_item(item)
        if oid:
            self.objects.update_one({'_id': oid}, {'$set': item})
        else:
            bean: InsertOneResult = self.objects.insert_one(item)
            oid = bean.inserted_id
            self.objects.update_one({'_id': oid}, {'$set': {'sort': str(oid)}})
        return self.objects.find_one({'_id': oid})

    def delete_one(self, _id):
        oid = self.model.object_id(_id)
        flag = False
        if oid:
            self.objects.delete_one({'_id': oid})
            flag = True
        return flag

    def find_one(self, _id):
        oid = self.model.object_id(_id)
        return self.objects.find_one({'_id': oid}) if oid else None

    def find(self, conditions=None, orders=None, size=0, page=1):
        query = MonQuery(conditions, orders)
        skip = (page - 1) * size
        total = self.objects.count_documents(query.finder)
        cursor = self.objects.find(query.finder, query.orders).skip(skip)
        if size:
            cursor = cursor.limit(size)
        return cursor, total

    def update(self, template, conditions):
        if template and len(template.keys()):
            oid, item = self.model.detach_item(template)
            query = MonQuery(conditions)
            self.objects.update_many(query.finder, {'$set': item})
            return True
        return False

    def delete(self, conditions):
        query = MonQuery(conditions)
        self.objects.delete_many(query.finder)

    def transform(self, o, code=None):
        if isinstance(o, Cursor):
            return [__to_item__(doc) for doc in o]
        else:
            return __to_item__(o)

    def tree(self, root, conditions=None):
        pass


def __to_item__(o):
    o['id'] = str(o['_id'])
    del o['_id']
    return o
valar-1.0.23/src/valar/core/dao/dao_orm.py (new file, +96 lines):

import datetime
from django.core.paginator import Paginator
from django.db.models import QuerySet

from .engine import DaoEngine
from .query_orm import OrmQuery
from ..dao_abstract import AbstractDao


class OrmDao(AbstractDao):
    def __init__(self, entity):
        self.entity = entity
        engine = DaoEngine().orm
        self.model = engine.get_model(entity)
        self.objects = self.model.manager

    def get_model(self):
        return self.model

    def save_one(self, item):
        oid, simple_item, complex_item = self.model.detach_item(item)
        query_set = self.objects.filter(id=oid) if oid else []
        if len(query_set):
            simple_item['modify_time'] = datetime.datetime.now()
            query_set.update(**simple_item)
            bean = query_set.first()
        else:
            bean = self.objects.create(**simple_item)
            bean.sort = bean.id
            bean.save()
        self.model.save_complex_field(complex_item, bean)
        bean.save()
        return bean

    def delete_one(self, _id):
        oid = self.model.object_id(_id)
        flag = False
        if oid:
            query_set = self.objects.filter(id=oid)
            self.model.remove_files(query_set)
            query_set.delete()
            flag = True
        return flag

    def find_one(self, _id):
        oid = self.model.object_id(_id)
        return self.objects.filter(id=oid).first() if oid else None

    def find(self, conditions=None, orders=None, size=0, page=1):
        query = OrmQuery(conditions, orders)
        query_set = self.objects.filter(query.includes).exclude(query.excludes).order_by(*query.orders)
        total = query_set.count()
        if size:
            paginator = Paginator(query_set, size)
            query_set = paginator.page(page).object_list
        return query_set, total

    def update(self, template, conditions):
        if template and len(template.keys()):
            oid, simple_item, complex_item = self.model.detach_item(template)
            query_set, total = self.find(conditions)
            query_set.update(**simple_item)
            return True
        return False

    def delete(self, conditions):
        query_set, total = self.find(conditions)
        self.model.remove_files(query_set)
        query_set.delete()

    def transform(self, o, code=None):
        if isinstance(o, QuerySet):
            return self.model.to_dict(o, code)
        else:
            return o.full()

    def tree(self, root, conditions=None):
        all_set, _ = self.find([])
        query = OrmQuery(conditions)
        if query.is_empty(conditions):
            return all_set
        values = all_set.values('id', 'pid')
        mapping = {item['id']: item['pid'] for item in values}
        results, _ = self.find(conditions)
        id_set = {root}
        for item in results:
            _id = item.id
            route = []
            while _id is not None:
                route.append(_id)
                _id = mapping.get(_id)
            if root in route:
                id_set.update(route)
        return all_set.filter(id__in=id_set).order_by('-sort')
valar-1.0.23/src/valar/core/dao/engine_minio.py (new file, +90 lines):

import json
from io import BytesIO

from django.conf import settings
from minio import Minio
from urllib3 import BaseHTTPResponse

from ..singleton_meta import SingletonMeta

class MinioEngine(metaclass=SingletonMeta):

    def __int__(self):
        self.client = Minio(**settings.MINIO_SETTINGS)

    def __load_bucket__(self, bucket_name):
        if not self.client.bucket_exists(bucket_name):
            self.client.make_bucket(bucket_name)
            policy = __generate_policy__(bucket_name)
            self.client.set_bucket_policy(bucket_name, policy)

    @staticmethod
    def get_object_name(_id, prop, file_name):
        return f"{_id}-{prop}-{file_name}"

    @staticmethod
    def get_bucket_name(entity):
        name = f'{settings.BASE_DIR.name}.{entity}'
        return name.replace('_', '-').lower()

    def upload(self, bucket_name, object_name, _bytes):
        self.__load_bucket__(bucket_name)
        file_data = BytesIO(_bytes)
        file_size = len(_bytes)  # file.siz
        self.client.put_object(
            bucket_name=bucket_name,
            object_name=object_name,
            data=file_data,
            length=file_size
        )
        return f'{bucket_name}/{object_name}'

    def remove_path(self, path):
        bucket_name, object_name = path.split('/')
        self.remove(bucket_name, object_name)

    def remove(self, bucket_name, object_name):
        self.client.remove_object(
            bucket_name=bucket_name,
            object_name=object_name
        )

    def read(self, bucket_name, object_name) -> BytesIO:
        ret: BaseHTTPResponse = self.client.get_object(bucket_name=bucket_name, object_name=object_name)
        return BytesIO(ret.read())


def __generate_policy__(bucket_name):
    return json.dumps({
        "Version": "2012-10-17",
        "Statement": [
            {
                "Sid": "",
                "Effect": "Allow",
                "Principal": {"AWS": "*"},
                "Action": "s3:GetBucketLocation",
                "Resource": f"arn:aws:s3:::{bucket_name}"
            },
            {
                "Sid": "",
                "Effect": "Allow",
                "Principal": {"AWS": "*"},
                "Action": "s3:ListBucket",
                "Resource": f"arn:aws:s3:::{bucket_name}"
            },
            {
                "Sid": "",
                "Effect": "Allow",
                "Principal": {"AWS": "*"},
                "Action": "s3:GetObject",
                "Resource": f"arn:aws:s3:::{bucket_name}/*"
            },
            {
                "Sid": "",
                "Effect": "Allow",
                "Principal": {"AWS": "*"},
                "Action": "s3:PutObject",
                "Resource": f"arn:aws:s3:::{bucket_name}/*"
            }
        ]})
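`MinioEngine` expands `settings.MINIO_SETTINGS` straight into the `Minio(...)` constructor, so the dict should carry standard minio-py connection arguments. A settings sketch with placeholder values:

```python
# settings.py sketch — keys mirror the minio.Minio(...) constructor arguments;
# the endpoint and credentials below are placeholders.
MINIO_SETTINGS = {
    'endpoint': 'localhost:9000',
    'access_key': 'minioadmin',
    'secret_key': 'minioadmin',
    'secure': False,
}
```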
valar-1.0.23/src/valar/core/dao/engine_mon.py (new file, +34 lines):

import pymongo
from django.conf import settings

from .model_mon import MonModel


class MonEngine:

    def __init__(self):
        uri = f'mongodb://localhost:27017/'
        mongo = settings.MONGO
        if mongo:
            param = ['host', 'port', 'username', 'password']
            host, port, username, password = [mongo.get(p) for p in param]
            uri = f'mongodb://{username}:{password}@{host}:{port}/'

        client = pymongo.MongoClient(uri, **{
            'maxPoolSize': 10,
            'minPoolSize': 0,
            'maxIdleTimeMS': 10000,
            'connectTimeoutMS': 10000,
            'socketTimeoutMS': 10000,
            'serverSelectionTimeoutMS': 10000,
        })
        database = client[settings.BASE_APP]
        self.uri = uri
        self.client = client
        self.database = database

    def get_mapping(self):
        return {col['name']: self.database[col['name']] for col in self.database.list_collections()}

    def get_model(self, entity) -> MonModel:
        return MonModel(self.database, entity)
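`MonEngine` falls back to `mongodb://localhost:27017/` when `settings.MONGO` is falsy, and uses `settings.BASE_APP` as the database name. A settings sketch with placeholder values:

```python
# settings.py sketch — MonEngine reads exactly these two settings; values are placeholders.
MONGO = {
    'host': 'localhost',
    'port': 27017,
    'username': 'valar',
    'password': 'secret',
}
BASE_APP = 'morghulis'  # becomes the MongoDB database name
```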
valar-1.0.23/src/valar/core/dao/engine_orm.py (new file, +25 lines):

from django.apps import apps

from .model_orm import OrmModel
from ..valar_models import VModel


class OrmEngine:

    def __init__(self):
        mapping = {}
        for model in apps.get_models():
            if issubclass(model, VModel):
                path, name = model.__module__, model.__name__
                prefix = 'src.valar.' if path.startswith('src') else 'valar.'
                app = path.replace('.models', '').replace(prefix, '')
                entity = '%s.%s' % (app, name)
                mapping[entity] = model
        self.mapping = mapping

    def get_mapping(self) -> dict:
        return self.mapping

    def get_model(self, entity) -> OrmModel:
        mod = self.mapping.get(entity)
        return OrmModel(mod, entity)
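`OrmEngine` only registers models that subclass `VModel` (defined in valar/core/valar_models.py, outside this excerpt). A sketch of what a registrable model might look like; the app name, class, and field are made up:

```python
# models.py of a hypothetical Django app named 'data'
from django.db import models
from valar.core.valar_models import VModel  # VModel's own fields are not shown in this diff

class Document(VModel):
    name = models.CharField(max_length=255, null=True)

# OrmEngine would register this model under the entity key 'data.Document'.
```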
valar-1.0.23/src/valar/core/dao/model_mon.py (new file, +24 lines):

from bson import ObjectId
from bson.errors import InvalidId
from pymongo.synchronous.collection import Collection


class MonModel:

    def __init__(self, database, entity):
        self.entity = entity
        self.name = entity.replace('.', '_')
        self.manager: Collection = database[self.name]

    @staticmethod
    def object_id(_id):
        try:
            return ObjectId(_id)
        except (InvalidId, TypeError):
            return None

    def detach_item(self, item):
        _id = item.get('id')
        if _id:
            del item['id']
        return self.object_id(_id), item