lesscode-flask 0.0.27__tar.gz → 0.0.31__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/PKG-INFO +1 -2
- lesscode_flask-0.0.31/lesscode_flask/__init__.py +55 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/app.py +19 -18
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/model/response_result.py +1 -1
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/service/base_service.py +46 -10
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/setting/__init__.py +2 -1
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/setup/__init__.py +2 -1
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/helpers.py +38 -16
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask.egg-info/PKG-INFO +1 -2
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask.egg-info/SOURCES.txt +2 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask.egg-info/requires.txt +0 -1
- lesscode_flask-0.0.31/redash/query_runner/elasticsearch.py +515 -0
- lesscode_flask-0.0.31/redash/utils/__init__.py +71 -0
- lesscode_flask-0.0.27/lesscode_flask/__init__.py +0 -1
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/README.md +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/db/__init__.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/db/datasource.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/db/executor.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/log/access_log_handler.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/model/access_log.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/model/auth_client.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/model/auth_permission.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/model/base_model.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/model/parameterized_query.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/model/user.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/service/access_log_service.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/service/auth_client_service.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/service/auth_permission_service.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/service/authentication_service.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/__init__.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/decorator/__init__.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/decorator/cache.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/decorator/swagger.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/file/file_exporter.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/json/NotSortJSONProvider.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/oss/__init__.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/oss/ks3_oss.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/redis/redis_helper.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/request/request.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/swagger/swagger_template.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/swagger/swagger_util.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/wsgi.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask.egg-info/dependency_links.txt +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask.egg-info/top_level.txt +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/redash/query_runner/__init__.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/redash/query_runner/clickhouse.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/redash/query_runner/kingbase.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/redash/query_runner/mysql.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/redash/query_runner/pg.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/redash/settings/__init__.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/redash/settings/helpers.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/redash/utils/requests_session.py +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/setup.cfg +0 -0
- {lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/setup.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: lesscode-flask
|
|
3
|
-
Version: 0.0.
|
|
3
|
+
Version: 0.0.31
|
|
4
4
|
Summary: lesscode-flask 是基于flask的web开发脚手架项目,该项目初衷为简化开发过程,让研发人员更加关注业务。
|
|
5
5
|
Home-page: https://lesscode-flask
|
|
6
6
|
Author: Chao.yy
|
|
@@ -31,7 +31,6 @@ Requires-Dist: pystache==0.6.5
|
|
|
31
31
|
Requires-Dist: flask-swagger-ui==4.11.1
|
|
32
32
|
Requires-Dist: lesscode-utils==0.0.61
|
|
33
33
|
Requires-Dist: Flask-Login==0.6.3
|
|
34
|
-
Requires-Dist: gevent==24.2.1
|
|
35
34
|
|
|
36
35
|
# lesscode-flask
|
|
37
36
|
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
__version__ = '0.0.31'
|
|
2
|
+
|
|
3
|
+
import functools
|
|
4
|
+
import logging
|
|
5
|
+
import traceback
|
|
6
|
+
|
|
7
|
+
from flask import Blueprint
|
|
8
|
+
from lesscode_flask.utils.decorator.cache import deal_cache
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class SQ_Blueprint(Blueprint):
|
|
12
|
+
def __init__(self, name: str, url_prefix: str, **kwargs):
|
|
13
|
+
if not kwargs.get("import_name"):
|
|
14
|
+
kwargs["import_name"] = __name__
|
|
15
|
+
super().__init__(name=name, url_prefix=url_prefix, **kwargs)
|
|
16
|
+
|
|
17
|
+
def decorator_handler(self, title: str, url: str = None, cache_enalbe: bool = False, cache_ex: int = 3600 * 10,
|
|
18
|
+
content_type: str = "json", methods=['POST']):
|
|
19
|
+
options = {"methods": methods}
|
|
20
|
+
|
|
21
|
+
def decorator(func):
|
|
22
|
+
path = url if url else "/{}".format(func.__name__)
|
|
23
|
+
|
|
24
|
+
@functools.wraps(func)
|
|
25
|
+
def wrapper(*args, **kwargs):
|
|
26
|
+
# 如果开启了缓存开关
|
|
27
|
+
if cache_enalbe:
|
|
28
|
+
# 尝试从缓存中获取数据
|
|
29
|
+
try:
|
|
30
|
+
data = deal_cache(func, cache_ex, "", *args, **kwargs)
|
|
31
|
+
except Exception as e:
|
|
32
|
+
logging.error(traceback.format_exc())
|
|
33
|
+
data = func(*args, **kwargs)
|
|
34
|
+
return data
|
|
35
|
+
# 如果没有开启缓存,或者缓存未命中,则执行原始函数
|
|
36
|
+
data = func(*args, **kwargs)
|
|
37
|
+
return data
|
|
38
|
+
|
|
39
|
+
# 添加 URL 规则到 Flask 路由
|
|
40
|
+
self.add_url_rule(path, None, wrapper, **options)
|
|
41
|
+
return wrapper
|
|
42
|
+
|
|
43
|
+
decorator._title = title
|
|
44
|
+
decorator._request_type = content_type
|
|
45
|
+
return decorator
|
|
46
|
+
|
|
47
|
+
def post_route(self, title: str, url: str = None, cache_enalbe: bool = False, cache_ex: int = 3600 * 10,
|
|
48
|
+
content_type: str = "json", methods=['POST']):
|
|
49
|
+
decorator = self.decorator_handler(title, url, cache_enalbe, cache_ex, content_type, methods)
|
|
50
|
+
return decorator
|
|
51
|
+
|
|
52
|
+
def get_route(self, title: str, url: str = None, cache_enalbe: bool = False, cache_ex: int = 3600 * 10,
|
|
53
|
+
content_type: str = "json", methods=['GET']):
|
|
54
|
+
decorator = self.decorator_handler(title, url, cache_enalbe, cache_ex, content_type, methods)
|
|
55
|
+
return decorator
|
|
@@ -121,24 +121,25 @@ def create_app():
|
|
|
121
121
|
request.request_id = request_id
|
|
122
122
|
# 记录请求开始时间
|
|
123
123
|
request_start_time[request_id] = time.time()
|
|
124
|
-
#
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
if
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
124
|
+
if app.config.get("AUTHORIZATION_ENABLE"): # 启动 AUTHORIZATION_ENABLE 才进行权限验证
|
|
125
|
+
# 获取当前请求的url
|
|
126
|
+
url = request.path
|
|
127
|
+
# 获取URL 对应的id 与访问权限
|
|
128
|
+
id, access = RedisHelper(app_config.get("REDIS_OAUTH_KEY", "redis")).sync_hmget(f"upms:url_info:{url}",
|
|
129
|
+
["id", "access"])
|
|
130
|
+
if not id:
|
|
131
|
+
# 如果没有进行注册的url 默认需要登录权限
|
|
132
|
+
access = app_config.get("AUTH_DEFAULT_ACCESS", "0")
|
|
133
|
+
# '访问权限2:需要权限 1:需要登录 0:游客',
|
|
134
|
+
if str(access) == "1": # 需要登录
|
|
135
|
+
if current_user.is_anonymous_user:
|
|
136
|
+
# abort(403, "需要登录")
|
|
137
|
+
ResponseResult.fail("请登录后访问", status_code="403")
|
|
138
|
+
elif str(access) == "2": # 需要权限
|
|
139
|
+
if current_user.is_anonymous_user:
|
|
140
|
+
ResponseResult.fail("请登录后访问", status_code="403")
|
|
141
|
+
if not current_user.has_permission(id):
|
|
142
|
+
ResponseResult.fail("请获取授权后访问", status_code="403")
|
|
142
143
|
|
|
143
144
|
@app.after_request
|
|
144
145
|
def after_request(response):
|
|
@@ -1,9 +1,10 @@
|
|
|
1
1
|
import logging
|
|
2
2
|
|
|
3
3
|
from flask_login import current_user
|
|
4
|
+
|
|
4
5
|
from lesscode_flask.db import db
|
|
5
6
|
from lesscode_flask.model.base_model import BaseModel
|
|
6
|
-
from lesscode_flask.utils.helpers import serialize_result_to_dict, parameter_validation
|
|
7
|
+
from lesscode_flask.utils.helpers import serialize_result_to_dict, parameter_validation, alchemy_result_to_dict
|
|
7
8
|
|
|
8
9
|
logger = logging.getLogger(__name__)
|
|
9
10
|
|
|
@@ -100,31 +101,60 @@ class BaseService:
|
|
|
100
101
|
item = query.one()
|
|
101
102
|
return item
|
|
102
103
|
|
|
103
|
-
def get_items(self, filters: list = None
|
|
104
|
+
def get_items(self, select_columns: list = None, order_columns: list = None, filters: list = None, offset: int = 0,
|
|
105
|
+
size: int = 10):
|
|
104
106
|
"""
|
|
105
107
|
获取列表信息
|
|
108
|
+
:param select_columns:
|
|
109
|
+
:param order_columns:
|
|
106
110
|
:param filters:
|
|
111
|
+
:param offset:
|
|
112
|
+
:param size:
|
|
107
113
|
:return:
|
|
108
114
|
"""
|
|
109
115
|
query = self.model.query
|
|
110
116
|
if filters:
|
|
111
117
|
query = query.filter(*filters)
|
|
112
|
-
|
|
113
|
-
|
|
118
|
+
if order_columns:
|
|
119
|
+
query = query.order_by(*order_columns)
|
|
120
|
+
if offset > 0:
|
|
121
|
+
query = query.offset(offset).limit(size)
|
|
122
|
+
if select_columns:
|
|
123
|
+
query = query.with_entities(*select_columns)
|
|
124
|
+
data = alchemy_result_to_dict(query.all())
|
|
125
|
+
else:
|
|
126
|
+
data = serialize_result_to_dict(query.all())
|
|
127
|
+
return data
|
|
114
128
|
|
|
115
129
|
def delete_item(self, id: str):
|
|
116
|
-
self.model.query.filter_by(id=id).delete()
|
|
117
|
-
return id
|
|
130
|
+
return self.model.query.filter_by(id=id).delete()
|
|
118
131
|
|
|
119
132
|
def delete_items(self, filters: list):
|
|
120
133
|
if filters and len(filters) > 0:
|
|
121
|
-
self.model.query.filter(*filters).delete()
|
|
122
|
-
return
|
|
134
|
+
return self.model.query.filter(*filters).delete()
|
|
135
|
+
return 0
|
|
123
136
|
|
|
124
|
-
def page(self,
|
|
137
|
+
def page(self, select_columns: list = None, columns: list = None, order_columns: list = None, filters: list = None,
|
|
138
|
+
page_num: int = 1,
|
|
139
|
+
page_size: int = 10):
|
|
140
|
+
"""
|
|
141
|
+
分页查询
|
|
142
|
+
:param select_columns:
|
|
143
|
+
:param columns:
|
|
144
|
+
:param order_columns:
|
|
145
|
+
:param filters:
|
|
146
|
+
:param page_num:
|
|
147
|
+
:param page_size:
|
|
148
|
+
:return:
|
|
149
|
+
"""
|
|
125
150
|
query = self.model.query
|
|
126
151
|
if filters:
|
|
127
152
|
query = query.filter(*filters)
|
|
153
|
+
if order_columns:
|
|
154
|
+
query = query.order_by(*order_columns)
|
|
155
|
+
if select_columns:
|
|
156
|
+
query = query.with_entities(*select_columns)
|
|
157
|
+
|
|
128
158
|
pagination = query.paginate(page=page_num, per_page=page_size)
|
|
129
159
|
# 获取当前页的数据
|
|
130
160
|
items = pagination.items
|
|
@@ -132,7 +162,13 @@ class BaseService:
|
|
|
132
162
|
total = pagination.total
|
|
133
163
|
has_prev = pagination.has_prev
|
|
134
164
|
has_next = pagination.has_next
|
|
135
|
-
|
|
165
|
+
if select_columns:
|
|
166
|
+
data = alchemy_result_to_dict(items)
|
|
167
|
+
else:
|
|
168
|
+
data = serialize_result_to_dict(items)
|
|
169
|
+
result = {"columns": columns, "dataSource": data, "total": total,
|
|
136
170
|
"has_prev": has_prev,
|
|
137
171
|
"has_next": has_next}
|
|
172
|
+
if columns:
|
|
173
|
+
result["columns"] = columns
|
|
138
174
|
return result
|
|
@@ -25,6 +25,29 @@ def serialize_result_to_dict(result):
|
|
|
25
25
|
return {k: v for k, v in result.__dict__.items() if not k.startswith('_')}
|
|
26
26
|
|
|
27
27
|
|
|
28
|
+
def alchemy_result_to_dict(result):
|
|
29
|
+
"""
|
|
30
|
+
alchemy 指定字段查询后返回的数据解析为字典
|
|
31
|
+
:param result:
|
|
32
|
+
:return:
|
|
33
|
+
"""
|
|
34
|
+
data_list = []
|
|
35
|
+
if not result:
|
|
36
|
+
return result
|
|
37
|
+
if isinstance(result, list):
|
|
38
|
+
key_list = list(result[0]._fields)
|
|
39
|
+
for d in result:
|
|
40
|
+
dict_data = dict(zip(key_list, d))
|
|
41
|
+
data_list.append(dict_data)
|
|
42
|
+
return data_list
|
|
43
|
+
else:
|
|
44
|
+
if result:
|
|
45
|
+
key_list = list(result._fields)
|
|
46
|
+
return dict(zip(key_list, result))
|
|
47
|
+
else:
|
|
48
|
+
return {}
|
|
49
|
+
|
|
50
|
+
|
|
28
51
|
def generate_uuid():
|
|
29
52
|
"""
|
|
30
53
|
生成UUID
|
|
@@ -106,21 +129,21 @@ def inject_args(req, func, view_args={}):
|
|
|
106
129
|
# 兼容**kwargs 参数
|
|
107
130
|
if parameter.kind == inspect.Parameter.VAR_KEYWORD:
|
|
108
131
|
argument_value = kwargs
|
|
109
|
-
if argument_value:
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
132
|
+
# if argument_value:
|
|
133
|
+
# 获取形参类型
|
|
134
|
+
parameter_type = parameter.annotation
|
|
135
|
+
# 形参类型为空,尝试获取形参默认值类型
|
|
136
|
+
if parameter_type is inspect.Parameter.empty:
|
|
137
|
+
parameter_type = type(parameter.default)
|
|
138
|
+
if parameter_type == int:
|
|
139
|
+
params_dict[parameter_name] = int(argument_value)
|
|
140
|
+
elif parameter_type == float:
|
|
141
|
+
params_dict[parameter_name] = float(argument_value)
|
|
142
|
+
elif parameter_type == bool:
|
|
143
|
+
params_dict[parameter_name] = parse_boolean(argument_value)
|
|
144
|
+
else:
|
|
145
|
+
# 其余都按str处理
|
|
146
|
+
params_dict[parameter_name] = argument_value
|
|
124
147
|
return params_dict
|
|
125
148
|
|
|
126
149
|
|
|
@@ -136,4 +159,3 @@ def mustache_render(template, **kwargs):
|
|
|
136
159
|
template = Template(template)
|
|
137
160
|
# 渲染模板
|
|
138
161
|
return template.render(**kwargs)
|
|
139
|
-
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: lesscode-flask
|
|
3
|
-
Version: 0.0.
|
|
3
|
+
Version: 0.0.31
|
|
4
4
|
Summary: lesscode-flask 是基于flask的web开发脚手架项目,该项目初衷为简化开发过程,让研发人员更加关注业务。
|
|
5
5
|
Home-page: https://lesscode-flask
|
|
6
6
|
Author: Chao.yy
|
|
@@ -31,7 +31,6 @@ Requires-Dist: pystache==0.6.5
|
|
|
31
31
|
Requires-Dist: flask-swagger-ui==4.11.1
|
|
32
32
|
Requires-Dist: lesscode-utils==0.0.61
|
|
33
33
|
Requires-Dist: Flask-Login==0.6.3
|
|
34
|
-
Requires-Dist: gevent==24.2.1
|
|
35
34
|
|
|
36
35
|
# lesscode-flask
|
|
37
36
|
|
|
@@ -41,9 +41,11 @@ lesscode_flask/utils/swagger/swagger_template.py
|
|
|
41
41
|
lesscode_flask/utils/swagger/swagger_util.py
|
|
42
42
|
redash/query_runner/__init__.py
|
|
43
43
|
redash/query_runner/clickhouse.py
|
|
44
|
+
redash/query_runner/elasticsearch.py
|
|
44
45
|
redash/query_runner/kingbase.py
|
|
45
46
|
redash/query_runner/mysql.py
|
|
46
47
|
redash/query_runner/pg.py
|
|
47
48
|
redash/settings/__init__.py
|
|
48
49
|
redash/settings/helpers.py
|
|
50
|
+
redash/utils/__init__.py
|
|
49
51
|
redash/utils/requests_session.py
|
|
@@ -0,0 +1,515 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import sys
|
|
3
|
+
import urllib.request
|
|
4
|
+
import urllib.parse
|
|
5
|
+
import urllib.error
|
|
6
|
+
|
|
7
|
+
import requests
|
|
8
|
+
from requests.auth import HTTPBasicAuth
|
|
9
|
+
|
|
10
|
+
from redash.query_runner import *
|
|
11
|
+
from redash.utils import json_dumps, json_loads
|
|
12
|
+
|
|
13
|
+
try:
|
|
14
|
+
import http.client as http_client
|
|
15
|
+
except ImportError:
|
|
16
|
+
# Python 2
|
|
17
|
+
import http.client as http_client
|
|
18
|
+
|
|
19
|
+
logger = logging.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
ELASTICSEARCH_TYPES_MAPPING = {
|
|
22
|
+
"integer": TYPE_INTEGER,
|
|
23
|
+
"long": TYPE_INTEGER,
|
|
24
|
+
"float": TYPE_FLOAT,
|
|
25
|
+
"double": TYPE_FLOAT,
|
|
26
|
+
"boolean": TYPE_BOOLEAN,
|
|
27
|
+
"string": TYPE_STRING,
|
|
28
|
+
"date": TYPE_DATE,
|
|
29
|
+
"object": TYPE_STRING,
|
|
30
|
+
# "geo_point" TODO: Need to split to 2 fields somehow
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
ELASTICSEARCH_BUILTIN_FIELDS_MAPPING = {"_id": "Id", "_score": "Score"}
|
|
34
|
+
|
|
35
|
+
PYTHON_TYPES_MAPPING = {
|
|
36
|
+
str: TYPE_STRING,
|
|
37
|
+
bytes: TYPE_STRING,
|
|
38
|
+
bool: TYPE_BOOLEAN,
|
|
39
|
+
int: TYPE_INTEGER,
|
|
40
|
+
float: TYPE_FLOAT,
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class BaseElasticSearch(BaseQueryRunner):
|
|
45
|
+
should_annotate_query = False
|
|
46
|
+
DEBUG_ENABLED = False
|
|
47
|
+
|
|
48
|
+
@classmethod
|
|
49
|
+
def configuration_schema(cls):
|
|
50
|
+
return {
|
|
51
|
+
"type": "object",
|
|
52
|
+
"properties": {
|
|
53
|
+
"server": {"type": "string", "title": "Base URL"},
|
|
54
|
+
"basic_auth_user": {"type": "string", "title": "Basic Auth User"},
|
|
55
|
+
"basic_auth_password": {
|
|
56
|
+
"type": "string",
|
|
57
|
+
"title": "Basic Auth Password",
|
|
58
|
+
},
|
|
59
|
+
},
|
|
60
|
+
"order": ["server", "basic_auth_user", "basic_auth_password"],
|
|
61
|
+
"secret": ["basic_auth_password"],
|
|
62
|
+
"required": ["server"],
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
@classmethod
|
|
66
|
+
def enabled(cls):
|
|
67
|
+
return False
|
|
68
|
+
|
|
69
|
+
def __init__(self, configuration):
|
|
70
|
+
super(BaseElasticSearch, self).__init__(configuration)
|
|
71
|
+
self.syntax = "json"
|
|
72
|
+
|
|
73
|
+
if self.DEBUG_ENABLED:
|
|
74
|
+
http_client.HTTPConnection.debuglevel = 1
|
|
75
|
+
|
|
76
|
+
# you need to initialize logging, otherwise you will not see anything from requests
|
|
77
|
+
logging.basicConfig()
|
|
78
|
+
logging.getLogger().setLevel(logging.DEBUG)
|
|
79
|
+
requests_log = logging.getLogger("requests.packages.urllib3")
|
|
80
|
+
requests_log.setLevel(logging.DEBUG)
|
|
81
|
+
requests_log.propagate = True
|
|
82
|
+
|
|
83
|
+
logger.setLevel(logging.DEBUG)
|
|
84
|
+
|
|
85
|
+
self.server_url = self.configuration["server"]
|
|
86
|
+
if self.server_url[-1] == "/":
|
|
87
|
+
self.server_url = self.server_url[:-1]
|
|
88
|
+
|
|
89
|
+
basic_auth_user = self.configuration.get("basic_auth_user", None)
|
|
90
|
+
basic_auth_password = self.configuration.get("basic_auth_password", None)
|
|
91
|
+
self.auth = None
|
|
92
|
+
if basic_auth_user and basic_auth_password:
|
|
93
|
+
self.auth = HTTPBasicAuth(basic_auth_user, basic_auth_password)
|
|
94
|
+
|
|
95
|
+
def _get_mappings(self, url):
|
|
96
|
+
mappings = {}
|
|
97
|
+
error = None
|
|
98
|
+
try:
|
|
99
|
+
r = requests.get(url, auth=self.auth)
|
|
100
|
+
r.raise_for_status()
|
|
101
|
+
|
|
102
|
+
mappings = r.json()
|
|
103
|
+
except requests.HTTPError as e:
|
|
104
|
+
logger.exception(e)
|
|
105
|
+
error = "Failed to execute query. Return Code: {0} Reason: {1}".format(
|
|
106
|
+
r.status_code, r.text
|
|
107
|
+
)
|
|
108
|
+
mappings = None
|
|
109
|
+
except requests.exceptions.RequestException as e:
|
|
110
|
+
logger.exception(e)
|
|
111
|
+
error = "Connection refused"
|
|
112
|
+
mappings = None
|
|
113
|
+
|
|
114
|
+
return mappings, error
|
|
115
|
+
|
|
116
|
+
def _get_query_mappings(self, url):
|
|
117
|
+
mappings_data, error = self._get_mappings(url)
|
|
118
|
+
if error:
|
|
119
|
+
return mappings_data, error
|
|
120
|
+
|
|
121
|
+
mappings = {}
|
|
122
|
+
for index_name in mappings_data:
|
|
123
|
+
index_mappings = mappings_data[index_name]
|
|
124
|
+
for m in index_mappings.get("mappings", {}):
|
|
125
|
+
if "properties" not in index_mappings["mappings"][m]:
|
|
126
|
+
continue
|
|
127
|
+
for property_name in index_mappings["mappings"][m]["properties"]:
|
|
128
|
+
property_data = index_mappings["mappings"][m]["properties"][
|
|
129
|
+
property_name
|
|
130
|
+
]
|
|
131
|
+
if property_name not in mappings:
|
|
132
|
+
property_type = property_data.get("type", None)
|
|
133
|
+
if property_type:
|
|
134
|
+
if property_type in ELASTICSEARCH_TYPES_MAPPING:
|
|
135
|
+
mappings[property_name] = ELASTICSEARCH_TYPES_MAPPING[
|
|
136
|
+
property_type
|
|
137
|
+
]
|
|
138
|
+
else:
|
|
139
|
+
mappings[property_name] = TYPE_STRING
|
|
140
|
+
# raise Exception("Unknown property type: {0}".format(property_type))
|
|
141
|
+
|
|
142
|
+
return mappings, error
|
|
143
|
+
|
|
144
|
+
def get_schema(self, *args, **kwargs):
|
|
145
|
+
def parse_doc(doc, path=None):
|
|
146
|
+
"""Recursively parse a doc type dictionary
|
|
147
|
+
"""
|
|
148
|
+
path = path or []
|
|
149
|
+
result = []
|
|
150
|
+
for field, description in doc["properties"].items():
|
|
151
|
+
if "properties" in description:
|
|
152
|
+
result.extend(parse_doc(description, path + [field]))
|
|
153
|
+
else:
|
|
154
|
+
result.append(".".join(path + [field]))
|
|
155
|
+
return result
|
|
156
|
+
|
|
157
|
+
schema = {}
|
|
158
|
+
url = "{0}/_mappings".format(self.server_url)
|
|
159
|
+
mappings, error = self._get_mappings(url)
|
|
160
|
+
|
|
161
|
+
if mappings:
|
|
162
|
+
# make a schema for each index
|
|
163
|
+
# the index contains a mappings dict with documents
|
|
164
|
+
# in a hierarchical format
|
|
165
|
+
for name, index in mappings.items():
|
|
166
|
+
columns = []
|
|
167
|
+
schema[name] = {"name": name}
|
|
168
|
+
for doc, items in index["mappings"].items():
|
|
169
|
+
columns.extend(parse_doc(items))
|
|
170
|
+
|
|
171
|
+
# remove duplicates
|
|
172
|
+
# sort alphabetically
|
|
173
|
+
schema[name]["columns"] = sorted(set(columns))
|
|
174
|
+
return list(schema.values())
|
|
175
|
+
|
|
176
|
+
def _parse_results(
|
|
177
|
+
self, mappings, result_fields, raw_result, result_columns, result_rows
|
|
178
|
+
):
|
|
179
|
+
def add_column_if_needed(
|
|
180
|
+
mappings, column_name, friendly_name, result_columns, result_columns_index
|
|
181
|
+
):
|
|
182
|
+
if friendly_name not in result_columns_index:
|
|
183
|
+
result_columns.append(
|
|
184
|
+
{
|
|
185
|
+
"name": friendly_name,
|
|
186
|
+
"friendly_name": friendly_name,
|
|
187
|
+
"type": mappings.get(column_name, "string"),
|
|
188
|
+
}
|
|
189
|
+
)
|
|
190
|
+
result_columns_index[friendly_name] = result_columns[-1]
|
|
191
|
+
|
|
192
|
+
def get_row(rows, row):
|
|
193
|
+
if row is None:
|
|
194
|
+
row = {}
|
|
195
|
+
rows.append(row)
|
|
196
|
+
return row
|
|
197
|
+
|
|
198
|
+
def collect_value(mappings, row, key, value, type):
|
|
199
|
+
if result_fields and key not in result_fields_index:
|
|
200
|
+
return
|
|
201
|
+
|
|
202
|
+
mappings[key] = type
|
|
203
|
+
add_column_if_needed(
|
|
204
|
+
mappings, key, key, result_columns, result_columns_index
|
|
205
|
+
)
|
|
206
|
+
row[key] = value
|
|
207
|
+
|
|
208
|
+
def collect_aggregations(
|
|
209
|
+
mappings, rows, parent_key, data, row, result_columns, result_columns_index
|
|
210
|
+
):
|
|
211
|
+
if isinstance(data, dict):
|
|
212
|
+
for key, value in data.items():
|
|
213
|
+
val = collect_aggregations(
|
|
214
|
+
mappings,
|
|
215
|
+
rows,
|
|
216
|
+
parent_key if key == "buckets" else key,
|
|
217
|
+
value,
|
|
218
|
+
row,
|
|
219
|
+
result_columns,
|
|
220
|
+
result_columns_index,
|
|
221
|
+
)
|
|
222
|
+
if val:
|
|
223
|
+
row = get_row(rows, row)
|
|
224
|
+
collect_value(mappings, row, key, val, "long")
|
|
225
|
+
|
|
226
|
+
for data_key in ["value", "doc_count"]:
|
|
227
|
+
if data_key not in data:
|
|
228
|
+
continue
|
|
229
|
+
if "key" in data and len(list(data.keys())) == 2:
|
|
230
|
+
key_is_string = "key_as_string" in data
|
|
231
|
+
collect_value(
|
|
232
|
+
mappings,
|
|
233
|
+
row,
|
|
234
|
+
data["key"] if not key_is_string else data["key_as_string"],
|
|
235
|
+
data[data_key],
|
|
236
|
+
"long" if not key_is_string else "string",
|
|
237
|
+
)
|
|
238
|
+
else:
|
|
239
|
+
return data[data_key]
|
|
240
|
+
|
|
241
|
+
elif isinstance(data, list):
|
|
242
|
+
for value in data:
|
|
243
|
+
result_row = get_row(rows, row)
|
|
244
|
+
collect_aggregations(
|
|
245
|
+
mappings,
|
|
246
|
+
rows,
|
|
247
|
+
parent_key,
|
|
248
|
+
value,
|
|
249
|
+
result_row,
|
|
250
|
+
result_columns,
|
|
251
|
+
result_columns_index,
|
|
252
|
+
)
|
|
253
|
+
if "doc_count" in value:
|
|
254
|
+
collect_value(
|
|
255
|
+
mappings,
|
|
256
|
+
result_row,
|
|
257
|
+
"doc_count",
|
|
258
|
+
value["doc_count"],
|
|
259
|
+
"integer",
|
|
260
|
+
)
|
|
261
|
+
if "key" in value:
|
|
262
|
+
if "key_as_string" in value:
|
|
263
|
+
collect_value(
|
|
264
|
+
mappings,
|
|
265
|
+
result_row,
|
|
266
|
+
parent_key,
|
|
267
|
+
value["key_as_string"],
|
|
268
|
+
"string",
|
|
269
|
+
)
|
|
270
|
+
else:
|
|
271
|
+
collect_value(
|
|
272
|
+
mappings, result_row, parent_key, value["key"], "string"
|
|
273
|
+
)
|
|
274
|
+
|
|
275
|
+
return None
|
|
276
|
+
|
|
277
|
+
result_columns_index = {c["name"]: c for c in result_columns}
|
|
278
|
+
|
|
279
|
+
result_fields_index = {}
|
|
280
|
+
if result_fields:
|
|
281
|
+
for r in result_fields:
|
|
282
|
+
result_fields_index[r] = None
|
|
283
|
+
|
|
284
|
+
if "error" in raw_result:
|
|
285
|
+
error = raw_result["error"]
|
|
286
|
+
if len(error) > 10240:
|
|
287
|
+
error = error[:10240] + "... continues"
|
|
288
|
+
|
|
289
|
+
raise Exception(error)
|
|
290
|
+
elif "aggregations" in raw_result:
|
|
291
|
+
if result_fields:
|
|
292
|
+
for field in result_fields:
|
|
293
|
+
add_column_if_needed(
|
|
294
|
+
mappings, field, field, result_columns, result_columns_index
|
|
295
|
+
)
|
|
296
|
+
|
|
297
|
+
for key, data in raw_result["aggregations"].items():
|
|
298
|
+
collect_aggregations(
|
|
299
|
+
mappings,
|
|
300
|
+
result_rows,
|
|
301
|
+
key,
|
|
302
|
+
data,
|
|
303
|
+
None,
|
|
304
|
+
result_columns,
|
|
305
|
+
result_columns_index,
|
|
306
|
+
)
|
|
307
|
+
|
|
308
|
+
logger.debug("result_rows %s", str(result_rows))
|
|
309
|
+
logger.debug("result_columns %s", str(result_columns))
|
|
310
|
+
elif "hits" in raw_result and "hits" in raw_result["hits"]:
|
|
311
|
+
if result_fields:
|
|
312
|
+
for field in result_fields:
|
|
313
|
+
add_column_if_needed(
|
|
314
|
+
mappings, field, field, result_columns, result_columns_index
|
|
315
|
+
)
|
|
316
|
+
|
|
317
|
+
for h in raw_result["hits"]["hits"]:
|
|
318
|
+
row = {}
|
|
319
|
+
|
|
320
|
+
column_name = "_source" if "_source" in h else "fields"
|
|
321
|
+
for column in h[column_name]:
|
|
322
|
+
if result_fields and column not in result_fields_index:
|
|
323
|
+
continue
|
|
324
|
+
|
|
325
|
+
add_column_if_needed(
|
|
326
|
+
mappings, column, column, result_columns, result_columns_index
|
|
327
|
+
)
|
|
328
|
+
|
|
329
|
+
value = h[column_name][column]
|
|
330
|
+
row[column] = (
|
|
331
|
+
value[0]
|
|
332
|
+
if isinstance(value, list) and len(value) == 1
|
|
333
|
+
else value
|
|
334
|
+
)
|
|
335
|
+
|
|
336
|
+
result_rows.append(row)
|
|
337
|
+
else:
|
|
338
|
+
raise Exception(
|
|
339
|
+
"Redash failed to parse the results it got from Elasticsearch."
|
|
340
|
+
)
|
|
341
|
+
|
|
342
|
+
def test_connection(self):
|
|
343
|
+
try:
|
|
344
|
+
r = requests.get(
|
|
345
|
+
"{0}/_cluster/health".format(self.server_url), auth=self.auth
|
|
346
|
+
)
|
|
347
|
+
r.raise_for_status()
|
|
348
|
+
except requests.HTTPError as e:
|
|
349
|
+
logger.exception(e)
|
|
350
|
+
raise Exception(
|
|
351
|
+
"Failed to execute query. Return Code: {0} Reason: {1}".format(
|
|
352
|
+
r.status_code, r.text
|
|
353
|
+
)
|
|
354
|
+
)
|
|
355
|
+
except requests.exceptions.RequestException as e:
|
|
356
|
+
logger.exception(e)
|
|
357
|
+
raise Exception("Connection refused")
|
|
358
|
+
|
|
359
|
+
|
|
360
|
+
class Kibana(BaseElasticSearch):
|
|
361
|
+
@classmethod
|
|
362
|
+
def enabled(cls):
|
|
363
|
+
return True
|
|
364
|
+
|
|
365
|
+
def _execute_simple_query(
|
|
366
|
+
self, url, auth, _from, mappings, result_fields, result_columns, result_rows
|
|
367
|
+
):
|
|
368
|
+
url += "&from={0}".format(_from)
|
|
369
|
+
r = requests.get(url, auth=self.auth)
|
|
370
|
+
r.raise_for_status()
|
|
371
|
+
|
|
372
|
+
raw_result = r.json()
|
|
373
|
+
|
|
374
|
+
self._parse_results(
|
|
375
|
+
mappings, result_fields, raw_result, result_columns, result_rows
|
|
376
|
+
)
|
|
377
|
+
|
|
378
|
+
total = raw_result["hits"]["total"]
|
|
379
|
+
result_size = len(raw_result["hits"]["hits"])
|
|
380
|
+
logger.debug("Result Size: {0} Total: {1}".format(result_size, total))
|
|
381
|
+
|
|
382
|
+
return raw_result["hits"]["total"]
|
|
383
|
+
|
|
384
|
+
def run_query(self, query, user):
|
|
385
|
+
try:
|
|
386
|
+
error = None
|
|
387
|
+
|
|
388
|
+
logger.debug(query)
|
|
389
|
+
query_params = json_loads(query)
|
|
390
|
+
|
|
391
|
+
index_name = query_params["index"]
|
|
392
|
+
query_data = query_params["query"]
|
|
393
|
+
size = int(query_params.get("size", 500))
|
|
394
|
+
limit = int(query_params.get("limit", 500))
|
|
395
|
+
result_fields = query_params.get("fields", None)
|
|
396
|
+
sort = query_params.get("sort", None)
|
|
397
|
+
|
|
398
|
+
if not self.server_url:
|
|
399
|
+
error = "Missing configuration key 'server'"
|
|
400
|
+
return None, error
|
|
401
|
+
|
|
402
|
+
url = "{0}/{1}/_search?".format(self.server_url, index_name)
|
|
403
|
+
mapping_url = "{0}/{1}/_mapping".format(self.server_url, index_name)
|
|
404
|
+
|
|
405
|
+
mappings, error = self._get_query_mappings(mapping_url)
|
|
406
|
+
if error:
|
|
407
|
+
return None, error
|
|
408
|
+
|
|
409
|
+
if sort:
|
|
410
|
+
url += "&sort={0}".format(urllib.parse.quote_plus(sort))
|
|
411
|
+
|
|
412
|
+
url += "&q={0}".format(urllib.parse.quote_plus(query_data))
|
|
413
|
+
|
|
414
|
+
logger.debug("Using URL: {0}".format(url))
|
|
415
|
+
logger.debug("Using Query: {0}".format(query_data))
|
|
416
|
+
|
|
417
|
+
result_columns = []
|
|
418
|
+
result_rows = []
|
|
419
|
+
if isinstance(query_data, str):
|
|
420
|
+
_from = 0
|
|
421
|
+
while True:
|
|
422
|
+
query_size = size if limit >= (_from + size) else (limit - _from)
|
|
423
|
+
total = self._execute_simple_query(
|
|
424
|
+
url + "&size={0}".format(query_size),
|
|
425
|
+
self.auth,
|
|
426
|
+
_from,
|
|
427
|
+
mappings,
|
|
428
|
+
result_fields,
|
|
429
|
+
result_columns,
|
|
430
|
+
result_rows,
|
|
431
|
+
)
|
|
432
|
+
_from += size
|
|
433
|
+
if _from >= limit:
|
|
434
|
+
break
|
|
435
|
+
else:
|
|
436
|
+
# TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
|
|
437
|
+
raise Exception("Advanced queries are not supported")
|
|
438
|
+
|
|
439
|
+
json_data = json_dumps({"columns": result_columns, "rows": result_rows})
|
|
440
|
+
except requests.HTTPError as e:
|
|
441
|
+
logger.exception(e)
|
|
442
|
+
error = "Failed to execute query. Return Code: {0} Reason: {1}".format(
|
|
443
|
+
r.status_code, r.text
|
|
444
|
+
)
|
|
445
|
+
json_data = None
|
|
446
|
+
except requests.exceptions.RequestException as e:
|
|
447
|
+
logger.exception(e)
|
|
448
|
+
error = "Connection refused"
|
|
449
|
+
json_data = None
|
|
450
|
+
|
|
451
|
+
return json_data, error
|
|
452
|
+
|
|
453
|
+
|
|
454
|
+
class ElasticSearch(BaseElasticSearch):
    """Query runner that sends a raw JSON search body to an Elasticsearch index."""

    @classmethod
    def enabled(cls):
        # Always available; no optional dependency beyond what the base class needs.
        return True

    @classmethod
    def name(cls):
        return "Elasticsearch"

    def run_query(self, query, user):
        """Execute an Elasticsearch search and return ``(json_data, error)``.

        Exactly one element of the returned pair is ``None``.

        :param query: JSON text. The ``index`` key selects the index, the
            optional ``result_fields`` key restricts returned columns, and the
            remaining keys are forwarded verbatim as the search request body.
        :param user: unused; present to satisfy the query-runner interface.
        """
        r = None  # bound before raise_for_status(); read by the HTTPError handler
        try:
            error = None

            logger.debug(query)
            query_dict = json_loads(query)

            index_name = query_dict.pop("index", "")
            result_fields = query_dict.pop("result_fields", None)

            if not self.server_url:
                error = "Missing configuration key 'server'"
                return None, error

            url = "{0}/{1}/_search".format(self.server_url, index_name)
            mapping_url = "{0}/{1}/_mapping".format(self.server_url, index_name)

            mappings, error = self._get_query_mappings(mapping_url)
            if error:
                return None, error

            logger.debug("Using URL: %s", url)
            logger.debug("Using query: %s", query_dict)
            r = requests.get(url, json=query_dict, auth=self.auth)
            r.raise_for_status()
            logger.debug("Result: %s", r.json())

            result_columns = []
            result_rows = []
            self._parse_results(
                mappings, result_fields, r.json(), result_columns, result_rows
            )

            json_data = json_dumps({"columns": result_columns, "rows": result_rows})
        except (KeyboardInterrupt, JobTimeoutException) as e:
            # BUG FIX: the original handler omitted "as e" yet called
            # logger.exception(e), raising NameError instead of propagating
            # the cancellation/timeout to the caller.
            logger.exception(e)
            raise
        except requests.HTTPError as e:
            logger.exception(e)
            # HTTPError only comes from raise_for_status(), so r is bound here.
            error = "Failed to execute query. Return Code: {0} Reason: {1}".format(
                r.status_code, r.text
            )
            json_data = None
        except requests.exceptions.RequestException as e:
            logger.exception(e)
            error = "Connection refused"
            json_data = None

        return json_data, error
|
|
512
|
+
|
|
513
|
+
|
|
514
|
+
# Module-level side effect: make both runner classes available through the
# framework's register() hook (presumably the query-runner registry — its
# definition is outside this file).
register(Kibana)
register(ElasticSearch)
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import binascii
|
|
2
|
+
|
|
3
|
+
import datetime
|
|
4
|
+
import decimal
|
|
5
|
+
|
|
6
|
+
import json
|
|
7
|
+
|
|
8
|
+
import uuid
|
|
9
|
+
|
|
10
|
+
from sqlalchemy.orm.query import Query
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class JSONEncoder(json.JSONEncoder):
    """Adapter for `json.dumps`."""

    def __init__(self, **kwargs):
        from redash.query_runner import query_runners

        # Collect optional per-runner hooks; each gets first crack at
        # serializing a value before the generic handling in default().
        self.encoders = [
            runner.custom_json_encoder
            for runner in query_runners.values()
            if hasattr(runner, "custom_json_encoder")
        ]
        super().__init__(**kwargs)

    def default(self, o):
        # Runner-supplied encoders win. NOTE: a falsy result (0, "", None)
        # counts as "not handled" and falls through to the cases below.
        for encode in self.encoders:
            encoded = encode(self, o)
            if encoded:
                return encoded

        if isinstance(o, Query):
            return list(o)
        if isinstance(o, decimal.Decimal):
            return float(o)
        if isinstance(o, (datetime.timedelta, uuid.UUID)):
            return str(o)
        # See "Date Time String Format" in the ECMA-262 specification.
        # (datetime.datetime must be tested before datetime.date — it is a subclass.)
        if isinstance(o, datetime.datetime):
            text = o.isoformat()
            if o.microsecond:
                # Keep milliseconds only: drop the last three microsecond digits.
                text = text[:23] + text[26:]
            if text.endswith("+00:00"):
                text = text[:-6] + "Z"
            return text
        if isinstance(o, datetime.date):
            return o.isoformat()
        if isinstance(o, datetime.time):
            if o.utcoffset() is not None:
                raise ValueError("JSON can't represent timezone-aware times.")
            text = o.isoformat()
            if o.microsecond:
                # Truncate to milliseconds: "HH:MM:SS.mmm" is 12 characters.
                text = text[:12]
            return text
        if isinstance(o, (memoryview, bytes)):
            return binascii.hexlify(o).decode()
        return super().default(o)
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def json_loads(data, *args, **kwargs):
    """Deserialize *data*, forwarding every argument to :func:`json.loads`."""
    return json.loads(data, *args, **kwargs)
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def json_dumps(data, *args, **kwargs):
    """Serialize *data*, forwarding every argument to :func:`json.dumps`.

    Defaults applied only when the caller did not supply them:

    - ``cls=JSONEncoder`` so dates, decimals, UUIDs etc. serialize.
    - ``ensure_ascii=False`` to emit UTF-8 text instead of escape sequences.
    - ``allow_nan=False``: float nan/inf should render as null; with
      ``allow_nan=True`` Python would emit ``NaN``, which breaks the
      front-end JSON parser.
    """
    defaults = {"cls": JSONEncoder, "ensure_ascii": False, "allow_nan": False}
    for key, value in defaults.items():
        kwargs.setdefault(key, value)
    return json.dumps(data, *args, **kwargs)
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
__version__ = '0.0.27'
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/service/access_log_service.py
RENAMED
|
File without changes
|
{lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/service/auth_client_service.py
RENAMED
|
File without changes
|
{lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/service/auth_permission_service.py
RENAMED
|
File without changes
|
{lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/service/authentication_service.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/json/NotSortJSONProvider.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/swagger/swagger_template.py
RENAMED
|
File without changes
|
{lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask/utils/swagger/swagger_util.py
RENAMED
|
File without changes
|
|
File without changes
|
{lesscode_flask-0.0.27 → lesscode_flask-0.0.31}/lesscode_flask.egg-info/dependency_links.txt
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|