lesscode-flask 0.0.27__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lesscode_flask/__init__.py +1 -0
- lesscode_flask/app.py +156 -0
- lesscode_flask/db/__init__.py +60 -0
- lesscode_flask/db/datasource.py +27 -0
- lesscode_flask/db/executor.py +128 -0
- lesscode_flask/log/access_log_handler.py +62 -0
- lesscode_flask/model/access_log.py +26 -0
- lesscode_flask/model/auth_client.py +42 -0
- lesscode_flask/model/auth_permission.py +25 -0
- lesscode_flask/model/base_model.py +38 -0
- lesscode_flask/model/parameterized_query.py +210 -0
- lesscode_flask/model/response_result.py +60 -0
- lesscode_flask/model/user.py +118 -0
- lesscode_flask/service/access_log_service.py +8 -0
- lesscode_flask/service/auth_client_service.py +7 -0
- lesscode_flask/service/auth_permission_service.py +7 -0
- lesscode_flask/service/authentication_service.py +67 -0
- lesscode_flask/service/base_service.py +138 -0
- lesscode_flask/setting/__init__.py +122 -0
- lesscode_flask/setup/__init__.py +185 -0
- lesscode_flask/utils/__init__.py +1 -0
- lesscode_flask/utils/decorator/__init__.py +0 -0
- lesscode_flask/utils/decorator/cache.py +126 -0
- lesscode_flask/utils/decorator/swagger.py +19 -0
- lesscode_flask/utils/file/file_exporter.py +98 -0
- lesscode_flask/utils/helpers.py +139 -0
- lesscode_flask/utils/json/NotSortJSONProvider.py +9 -0
- lesscode_flask/utils/oss/__init__.py +0 -0
- lesscode_flask/utils/oss/ks3_oss.py +203 -0
- lesscode_flask/utils/redis/redis_helper.py +117 -0
- lesscode_flask/utils/request/request.py +96 -0
- lesscode_flask/utils/swagger/swagger_template.py +82 -0
- lesscode_flask/utils/swagger/swagger_util.py +172 -0
- lesscode_flask/wsgi.py +37 -0
- lesscode_flask-0.0.27.dist-info/METADATA +127 -0
- lesscode_flask-0.0.27.dist-info/RECORD +46 -0
- lesscode_flask-0.0.27.dist-info/WHEEL +5 -0
- lesscode_flask-0.0.27.dist-info/top_level.txt +2 -0
- redash/query_runner/__init__.py +523 -0
- redash/query_runner/clickhouse.py +230 -0
- redash/query_runner/kingbase.py +228 -0
- redash/query_runner/mysql.py +309 -0
- redash/query_runner/pg.py +284 -0
- redash/settings/__init__.py +90 -0
- redash/settings/helpers.py +66 -0
- redash/utils/requests_session.py +18 -0
|
@@ -0,0 +1,309 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import os
|
|
3
|
+
import threading
|
|
4
|
+
import traceback
|
|
5
|
+
|
|
6
|
+
from redash.query_runner import (
|
|
7
|
+
TYPE_DATE,
|
|
8
|
+
TYPE_DATETIME,
|
|
9
|
+
TYPE_FLOAT,
|
|
10
|
+
TYPE_INTEGER,
|
|
11
|
+
TYPE_STRING,
|
|
12
|
+
BaseSQLQueryRunner,
|
|
13
|
+
InterruptException,
|
|
14
|
+
JobTimeoutException,
|
|
15
|
+
register,
|
|
16
|
+
)
|
|
17
|
+
from redash.settings import parse_boolean
|
|
18
|
+
|
|
19
|
+
try:
|
|
20
|
+
import pymysql
|
|
21
|
+
enabled = True
|
|
22
|
+
except ImportError:
|
|
23
|
+
logging.error(traceback.format_exc())
|
|
24
|
+
logging.error(f"pymysql is not exist,run:pip install pymysql==1.0.1")
|
|
25
|
+
enabled = False
|
|
26
|
+
|
|
27
|
+
logger = logging.getLogger(__name__)
|
|
28
|
+
# Map MySQL protocol field-type codes (as reported in cursor.description)
# to Redash column types. The codes match MySQL's FIELD_TYPE constants
# (see pymysql.constants.FIELD_TYPE) — e.g. 0=DECIMAL, 1=TINY, 12=DATETIME.
# Codes missing from this map fall back to None in _run_query.
types_map = {
    0: TYPE_FLOAT,      # DECIMAL
    1: TYPE_INTEGER,    # TINY
    2: TYPE_INTEGER,    # SHORT
    3: TYPE_INTEGER,    # LONG
    4: TYPE_FLOAT,      # FLOAT
    5: TYPE_FLOAT,      # DOUBLE
    7: TYPE_DATETIME,   # TIMESTAMP
    8: TYPE_INTEGER,    # LONGLONG
    9: TYPE_INTEGER,    # INT24
    10: TYPE_DATE,      # DATE
    12: TYPE_DATETIME,  # DATETIME
    15: TYPE_STRING,    # VARCHAR
    16: TYPE_INTEGER,   # BIT
    246: TYPE_FLOAT,    # NEWDECIMAL
    253: TYPE_STRING,   # VAR_STRING
    254: TYPE_STRING,   # STRING
}
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
class Result:
    """Mutable holder for a single query outcome.

    ``data`` and ``error`` attributes are assigned externally (by
    ``Mysql._run_query``); the class itself defines nothing.
    """

    def __init__(self):
        # No initialization: attributes are attached by the query runner.
        pass
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class Mysql(BaseSQLQueryRunner):
    """Redash query runner for MySQL databases, backed by pymysql."""

    noop_query = "SELECT 1"

    @classmethod
    def configuration_schema(cls):
        """Return the JSON schema describing this runner's configuration.

        SSL-related properties are exposed only when the
        MYSQL_SHOW_SSL_SETTINGS environment variable parses as true
        (the default).
        """
        show_ssl_settings = parse_boolean(os.environ.get("MYSQL_SHOW_SSL_SETTINGS", "true"))

        schema = {
            "type": "object",
            "properties": {
                "host": {"type": "string", "default": "127.0.0.1"},
                "user": {"type": "string"},
                "passwd": {"type": "string", "title": "Password"},
                "db": {"type": "string", "title": "Database name"},
                "port": {"type": "number", "default": 3306},
                "connect_timeout": {"type": "number", "default": 60, "title": "Connection Timeout"},
                "charset": {"type": "string", "default": "utf8"},
                "use_unicode": {"type": "boolean", "default": True},
                # Default aligned with _connection(), which falls back to
                # autocommit=True when the option is absent (the schema
                # previously advertised False, contradicting the code).
                "autocommit": {"type": "boolean", "default": True},
            },
            "order": [
                "host",
                "port",
                "user",
                "passwd",
                "db",
                "connect_timeout",
                "charset",
                "use_unicode",
                "autocommit",
            ],
            "required": ["db"],
            "secret": ["passwd"],
        }

        if show_ssl_settings:
            schema["properties"].update(
                {
                    "ssl_mode": {
                        "type": "string",
                        "title": "SSL Mode",
                        "default": "preferred",
                        "extendedEnum": [
                            {"value": "disabled", "name": "Disabled"},
                            {"value": "preferred", "name": "Preferred"},
                            {"value": "required", "name": "Required"},
                            {"value": "verify-ca", "name": "Verify CA"},
                            {"value": "verify-identity", "name": "Verify Identity"},
                        ],
                    },
                    "use_ssl": {"type": "boolean", "title": "Use SSL"},
                    "ssl_cacert": {
                        "type": "string",
                        "title": "Path to CA certificate file to verify peer against (SSL)",
                    },
                    "ssl_cert": {
                        "type": "string",
                        "title": "Path to client certificate file (SSL)",
                    },
                    "ssl_key": {
                        "type": "string",
                        "title": "Path to private key file (SSL)",
                    },
                }
            )

        return schema

    @classmethod
    def name(cls):
        return "MySQL"

    @classmethod
    def enabled(cls):
        # False when pymysql could not be imported (see module-level try).
        return enabled

    def _connection(self):
        """Open a new pymysql connection from the current configuration."""
        params = dict(
            host=self.configuration.get("host", ""),
            user=self.configuration.get("user", ""),
            passwd=self.configuration.get("passwd", ""),
            db=self.configuration["db"],
            port=self.configuration.get("port", 3306),
            charset=self.configuration.get("charset", "utf8"),
            use_unicode=self.configuration.get("use_unicode", True),
            connect_timeout=self.configuration.get("connect_timeout", 60),
            autocommit=self.configuration.get("autocommit", True),
        )

        ssl_options = self._get_ssl_parameters()
        if ssl_options:
            params["ssl"] = ssl_options

        return pymysql.connect(**params)

    def _get_tables(self, schema):
        """Populate ``schema`` with tables/columns visible to the connection.

        Tables outside the configured database are keyed as
        "schema.table"; tables in the configured database keep the bare
        table name. Returns the schema values as a list.
        """
        query = """
        SELECT col.table_schema as table_schema,
               col.table_name as table_name,
               col.column_name as column_name
        FROM `information_schema`.`columns` col
        WHERE col.table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys');
        """

        results, error = self.run_query(query, None)

        if error is not None:
            self._handle_run_query_error(error)

        for row in results["rows"]:
            if row["table_schema"] != self.configuration["db"]:
                table_name = "{}.{}".format(row["table_schema"], row["table_name"])
            else:
                table_name = row["table_name"]

            if table_name not in schema:
                schema[table_name] = {"name": table_name, "columns": []}

            schema[table_name]["columns"].append(row["column_name"])

        return list(schema.values())

    def run_query(self, query, user):
        """Execute ``query`` synchronously and return ``(data, error)``.

        NOTE(review): execution happens on the calling thread (the worker
        thread + KILL-based cancellation of the upstream implementation is
        commented out here), so on interruption there is no server thread
        id available and _cancel degrades to a no-op.
        """
        ev = threading.Event()
        thread_id = None  # never populated in the synchronous code path
        r = Result()

        try:
            connection = self._connection()
            self._run_query(query, user, connection, r, ev)
        except (KeyboardInterrupt, InterruptException, JobTimeoutException):
            self._cancel(thread_id)
            raise

        return r.data, r.error

    def _run_query(self, query, user, connection, r, ev):
        """Run ``query`` on ``connection`` and store the outcome on ``r``.

        Always sets ``ev`` and closes the connection. For multi-statement
        queries, only the last result set that produced columns is kept.
        """
        cursor = None  # pre-bind so the except branch is safe if cursor() raises
        try:
            cursor = connection.cursor()
            logger.debug("MySQL running query: %s", query)
            cursor.execute(query)

            data = cursor.fetchall()
            desc = cursor.description

            # Advance through any additional result sets, keeping the last
            # one that actually has a description (i.e. returned columns).
            while cursor.nextset():
                if cursor.description is not None:
                    data = cursor.fetchall()
                    desc = cursor.description

            # TODO - very similar to pg.py
            if desc is not None:
                columns = self.fetch_columns([(col[0], types_map.get(col[1], None)) for col in desc])
                rows = [dict(zip((column["name"] for column in columns), row)) for row in data]
                r.data = {"columns": columns, "rows": rows}
                r.error = None
            else:
                r.data = None
                r.error = "No data was returned."

            cursor.close()
        except pymysql.Error as e:
            if cursor:
                cursor.close()
            r.data = None
            r.error = e.args[1]
        finally:
            ev.set()
            if connection:
                connection.close()

    def _get_ssl_parameters(self):
        """Build the ``ssl`` dict for pymysql.connect, or None when SSL is off.

        Maps configuration keys to the file-path keys pymysql understands.
        The previous implementation also copied "ssl_mode" into the dict
        under the meaningless key "preferred"; that entry is dropped since
        pymysql's ssl mapping takes ca/cert/key paths, not a mode string.
        """
        if not self.configuration.get("use_ssl"):
            return None

        ssl_params = {}
        config_map = {"ssl_cacert": "ca", "ssl_cert": "cert", "ssl_key": "key"}
        for config_key, ssl_key in config_map.items():
            value = self.configuration.get(config_key)
            if value:
                ssl_params[ssl_key] = value

        return ssl_params

    def _cancel(self, thread_id):
        """Best-effort KILL of a running query's server thread.

        Returns MySQL's error message on failure, else None. A non-integer
        ``thread_id`` (run_query passes None in the synchronous path) is a
        no-op — previously the ``"KILL %d"`` format raised TypeError.
        """
        if not isinstance(thread_id, int):
            return None

        connection = None
        cursor = None
        error = None

        try:
            connection = self._connection()
            cursor = connection.cursor()
            query = "KILL %d" % thread_id
            logging.debug(query)
            cursor.execute(query)
        except pymysql.Error as e:
            if cursor:
                cursor.close()
            error = e.args[1]
        finally:
            if connection:
                connection.close()

        return error
|
|
272
|
+
|
|
273
|
+
|
|
274
|
+
class RDSMySQL(Mysql):
    """MySQL runner variant preconfigured for Amazon RDS.

    Identical to :class:`Mysql` except that SSL connections use the RDS
    combined CA bundle shipped alongside this module.
    """

    @classmethod
    def name(cls):
        return "MySQL (Amazon RDS)"

    @classmethod
    def type(cls):
        return "rds_mysql"

    @classmethod
    def configuration_schema(cls):
        """Configuration form: no charset/timeout knobs, credentials required."""
        properties = {
            "host": {"type": "string"},
            "user": {"type": "string"},
            "passwd": {"type": "string", "title": "Password"},
            "db": {"type": "string", "title": "Database name"},
            "port": {"type": "number", "default": 3306},
            "use_ssl": {"type": "boolean", "title": "Use SSL"},
        }
        return {
            "type": "object",
            "properties": properties,
            "order": ["host", "port", "user", "passwd", "db"],
            "required": ["db", "user", "passwd", "host"],
            "secret": ["passwd"],
        }

    def _get_ssl_parameters(self):
        """Point pymysql at the bundled RDS CA when SSL is enabled."""
        if not self.configuration.get("use_ssl"):
            return None
        ca_path = os.path.join(os.path.dirname(__file__), "files/rds-combined-ca-bundle.pem")
        return {"ca": ca_path}
|
|
306
|
+
|
|
307
|
+
|
|
308
|
+
# Expose this module's runner through Redash's registry. Only the plain
# MySQL runner is registered; the RDS variant is deliberately left out.
register(Mysql)
# register(RDSMySQL)
|
|
@@ -0,0 +1,284 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import os
|
|
3
|
+
import select
|
|
4
|
+
import traceback
|
|
5
|
+
from base64 import b64decode
|
|
6
|
+
from tempfile import NamedTemporaryFile
|
|
7
|
+
|
|
8
|
+
try:
|
|
9
|
+
import pg8000
|
|
10
|
+
|
|
11
|
+
enabled = True
|
|
12
|
+
except ImportError:
|
|
13
|
+
enabled = False
|
|
14
|
+
logging.error(f"pg8000 is not exist,run:pip install pg8000==1.31.2")
|
|
15
|
+
|
|
16
|
+
from redash.query_runner import (
|
|
17
|
+
TYPE_BOOLEAN,
|
|
18
|
+
TYPE_DATE,
|
|
19
|
+
TYPE_DATETIME,
|
|
20
|
+
TYPE_FLOAT,
|
|
21
|
+
TYPE_INTEGER,
|
|
22
|
+
TYPE_STRING,
|
|
23
|
+
BaseSQLQueryRunner,
|
|
24
|
+
InterruptException,
|
|
25
|
+
JobTimeoutException,
|
|
26
|
+
register,
|
|
27
|
+
)
|
|
28
|
+
|
|
29
|
+
logger = logging.getLogger(__name__)
|
|
30
|
+
|
|
31
|
+
# Map PostgreSQL type OIDs (as reported in cursor.description) to Redash
# column types. OIDs per pg_catalog.pg_type; entries ending in [] are the
# corresponding array types. Unmapped OIDs fall back to None in run_query.
types_map = {
    20: TYPE_INTEGER,     # int8
    21: TYPE_INTEGER,     # int2
    23: TYPE_INTEGER,     # int4
    700: TYPE_FLOAT,      # float4
    1700: TYPE_FLOAT,     # numeric
    701: TYPE_FLOAT,      # float8
    16: TYPE_BOOLEAN,     # bool
    1082: TYPE_DATE,      # date
    1182: TYPE_DATE,      # date[]
    1114: TYPE_DATETIME,  # timestamp
    1184: TYPE_DATETIME,  # timestamptz
    1115: TYPE_DATETIME,  # timestamp[]
    1185: TYPE_DATETIME,  # timestamptz[]
    1014: TYPE_STRING,    # bpchar[]
    1015: TYPE_STRING,    # varchar[]
    1008: TYPE_STRING,    # regproc[]
    1009: TYPE_STRING,    # text[]
    2951: TYPE_STRING,    # uuid[]
    1043: TYPE_STRING,    # varchar
    1002: TYPE_STRING,    # char[]
    1003: TYPE_STRING,    # name[]
}
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def full_table_name(schema, name):
    """Return ``schema.name``, double-quoting the table part if it
    itself contains a dot (so the separator stays unambiguous)."""
    quoted = '"{}"'.format(name) if "." in name else name
    return "{}.{}".format(schema, quoted)
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def build_schema(query_result, schema):
    """Merge rows of a column-listing query result into ``schema`` in place.

    Each row must carry "table_schema", "table_name" and "column_name",
    and may carry "data_type". Tables outside the "public" schema are
    keyed as "schema.name". A public table keeps its bare name unless
    that bare name collides with some qualified name — e.g. a schema
    "main" holding table "users" alongside a public table literally named
    "main.users" — in which case it is qualified too.

    Columns are appended either as plain names or, when "data_type" is
    present, as ``{"name": ..., "type": ...}`` dicts.
    """

    def _qualified(row):
        # Same quoting rule as full_table_name, inlined so this function
        # stands alone.
        name = row["table_name"]
        if "." in name:
            name = '"{}"'.format(name)
        return "{}.{}".format(row["table_schema"], name)

    rows = query_result["rows"]
    all_qualified = {_qualified(row) for row in rows}

    for row in rows:
        bare = row["table_name"]
        if row["table_schema"] == "public" and bare not in all_qualified:
            table_name = bare
        else:
            table_name = _qualified(row)

        entry = schema.setdefault(table_name, {"name": table_name, "columns": []})

        if row.get("data_type") is None:
            entry["columns"].append(row["column_name"])
        else:
            entry["columns"].append({"name": row["column_name"], "type": row["data_type"]})
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def _create_cert_file(configuration, key, ssl_config):
|
|
98
|
+
file_key = key + "File"
|
|
99
|
+
if file_key in configuration:
|
|
100
|
+
with NamedTemporaryFile(mode="w", delete=False) as cert_file:
|
|
101
|
+
cert_bytes = b64decode(configuration[file_key])
|
|
102
|
+
cert_file.write(cert_bytes.decode("utf-8"))
|
|
103
|
+
|
|
104
|
+
ssl_config[key] = cert_file.name
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def _cleanup_ssl_certs(ssl_config):
|
|
108
|
+
for k, v in ssl_config.items():
|
|
109
|
+
if k != "sslmode":
|
|
110
|
+
os.remove(v)
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def _get_ssl_config(configuration):
    """Assemble the SSL settings dict for a PostgreSQL connection.

    Always contains "sslmode" (default "prefer"); additionally
    materializes any of the three base64 cert options into temp files via
    _create_cert_file, recording their paths.
    """
    ssl_config = {"sslmode": configuration.get("sslmode", "prefer")}

    for cert_key in ("sslrootcert", "sslcert", "sslkey"):
        _create_cert_file(configuration, cert_key, ssl_config)

    return ssl_config
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
class PostgreSQL(BaseSQLQueryRunner):
    """Redash query runner for PostgreSQL, backed by pg8000."""

    noop_query = "SELECT 1"

    @classmethod
    def configuration_schema(cls):
        """Return the JSON schema for the connection form.

        Connection options are keyed "user" / "password" / "host" /
        "port" / "dbname"; SSL options live under "extra_options".
        """
        return {
            "type": "object",
            "properties": {
                "user": {"type": "string"},
                "password": {"type": "string"},
                "host": {"type": "string", "default": "127.0.0.1"},
                "port": {"type": "number", "default": 5432},
                "dbname": {"type": "string", "title": "Database Name"},
                "sslmode": {
                    "type": "string",
                    "title": "SSL Mode",
                    "default": "prefer",
                    "extendedEnum": [
                        {"value": "disable", "name": "Disable"},
                        {"value": "allow", "name": "Allow"},
                        {"value": "prefer", "name": "Prefer"},
                        {"value": "require", "name": "Require"},
                        {"value": "verify-ca", "name": "Verify CA"},
                        {"value": "verify-full", "name": "Verify Full"},
                    ],
                },
                "sslrootcertFile": {"type": "string", "title": "SSL Root Certificate"},
                "sslcertFile": {"type": "string", "title": "SSL Client Certificate"},
                "sslkeyFile": {"type": "string", "title": "SSL Client Key"},
            },
            "order": ["host", "port", "user", "password"],
            "required": ["dbname"],
            "secret": ["password", "sslrootcertFile", "sslcertFile", "sslkeyFile"],
            "extra_options": [
                "sslmode",
                "sslrootcertFile",
                "sslcertFile",
                "sslkeyFile",
            ],
        }

    @classmethod
    def type(cls):
        return "pg"

    @classmethod
    def enabled(cls):
        # False when pg8000 could not be imported (see module-level try).
        return enabled

    def _get_definitions(self, schema, query):
        """Run a column-listing ``query`` and merge its rows into ``schema``."""
        results, error = self.run_query(query, None)

        if error is not None:
            self._handle_run_query_error(error)

        build_schema(results, schema)

    def _get_tables(self, schema):
        """Populate ``schema`` with user-visible relations and their columns.

        relkind constants per https://www.postgresql.org/docs/10/static/catalog-pg-class.html
        r = regular table
        v = view
        m = materialized view
        f = foreign table
        p = partitioned table (new in 10)
        ---
        i = index
        S = sequence
        t = TOAST table
        c = composite type
        """

        query = """
        SELECT s.nspname as table_schema,
               c.relname as table_name,
               a.attname as column_name,
               null as data_type
        FROM pg_class c
        JOIN pg_namespace s
        ON c.relnamespace = s.oid
        AND s.nspname NOT IN ('pg_catalog', 'information_schema')
        JOIN pg_attribute a
        ON a.attrelid = c.oid
        AND a.attnum > 0
        AND NOT a.attisdropped
        WHERE c.relkind IN ('m', 'f', 'p')
        AND has_table_privilege(s.nspname || '.' || c.relname, 'select')
        AND has_schema_privilege(s.nspname, 'usage')

        UNION

        SELECT table_schema,
               table_name,
               column_name,
               data_type
        FROM information_schema.columns
        WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
        """

        self._get_definitions(schema, query)

        return list(schema.values())

    def _get_connection(self):
        """Open a new pg8000 connection from the current configuration.

        Fixed to read the keys this class's configuration_schema actually
        declares: the previous code looked up "passwd"/"db" (so schema-built
        configurations raised KeyError) and defaulted the port to 54321
        instead of the declared 5432.
        """
        params = dict(
            host=self.configuration.get("host", ""),
            user=self.configuration.get("user", ""),
            password=self.configuration.get("password", ""),
            database=self.configuration["dbname"],
            port=self.configuration.get("port", 5432),
        )
        return pg8000.connect(**params)

    def run_query(self, query, user):
        """Execute ``query`` and return ``(data, error)``.

        The connection is always closed; interruption cancels the
        server-side query before re-raising.
        """
        connection = self._get_connection()
        cursor = connection.cursor()

        try:
            cursor.execute(query)

            if cursor.description is not None:
                columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in cursor.description])
                rows = [dict(zip((column["name"] for column in columns), row)) for row in cursor]

                data = {"columns": columns, "rows": rows}
                error = None
            else:
                error = "Query completed but it returned no data."
                data = None
        except (select.error, OSError):
            error = "Query interrupted. Please retry."
            data = None
        except pg8000.DatabaseError as e:
            error = str(e)
            data = None
        except (KeyboardInterrupt, InterruptException, JobTimeoutException):
            connection.cancel()
            raise
        finally:
            connection.close()

        return data, error
|
|
282
|
+
|
|
283
|
+
|
|
284
|
+
register(PostgreSQL)
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
import os
|
|
2
|
+
|
|
3
|
+
from redash.settings.helpers import parse_boolean, array_from_string
|
|
4
|
+
from funcy import distinct, remove
|
|
5
|
+
|
|
6
|
+
# Whether API calls using the JSON query runner will block private addresses.
ENFORCE_PRIVATE_ADDRESS_BLOCK = parse_boolean(os.environ.get("REDASH_ENFORCE_PRIVATE_IP_BLOCK", "true"))

# Whether outgoing HTTP requests made by runners may follow redirects.
REQUESTS_ALLOW_REDIRECTS = parse_boolean(os.environ.get("REDASH_REQUESTS_ALLOW_REDIRECTS", "false"))


# Query runner modules enabled by default. Upstream Redash ships dozens of
# runners; this build bundles only the four below.
default_query_runners = [
    "redash.query_runner.mysql",
    "redash.query_runner.pg",
    "redash.query_runner.kingbase",
    "redash.query_runner.clickhouse",
]

# Effective runner list: (enabled + additional) de-duplicated in order,
# minus anything explicitly disabled. All three lists are env-overridable.
enabled_query_runners = array_from_string(
    os.environ.get("REDASH_ENABLED_QUERY_RUNNERS", ",".join(default_query_runners))
)
additional_query_runners = array_from_string(os.environ.get("REDASH_ADDITIONAL_QUERY_RUNNERS", ""))
disabled_query_runners = array_from_string(os.environ.get("REDASH_DISABLED_QUERY_RUNNERS", ""))

QUERY_RUNNERS = remove(
    set(disabled_query_runners),
    distinct(enabled_query_runners + additional_query_runners),
)
|