@dataramen/cli 0.0.10 → 0.0.12
This diff shows the changes between publicly released versions of the package as they appear in the supported public registries. It is provided for informational purposes only.
- package/bin/run.js +2 -169
- package/dist/code/cli.js +2 -169
- package/dist/code/proxy.js +6 -6
- package/dist/package.json +1 -1
- package/package.json +7 -2
- package/dist/code/api/chat/router.js +0 -55
- package/dist/code/api/dataSources/router.js +0 -147
- package/dist/code/api/dataSources/types.js +0 -2
- package/dist/code/api/dataSources/validators.js +0 -22
- package/dist/code/api/project/router.js +0 -100
- package/dist/code/api/queries/router.js +0 -122
- package/dist/code/api/runner/router.js +0 -22
- package/dist/code/api/status/router.js +0 -17
- package/dist/code/api/teams/router.js +0 -35
- package/dist/code/api/userSettings/router.js +0 -54
- package/dist/code/api/users/router.js +0 -91
- package/dist/code/api/workbooks/router.js +0 -123
- package/dist/code/api/workbooks/types.js +0 -2
- package/dist/code/env.js +0 -25
- package/dist/code/index.js +0 -86
- package/dist/code/repository/db.js +0 -58
- package/dist/code/repository/tables/databaseInspection.js +0 -40
- package/dist/code/repository/tables/datasource.js +0 -86
- package/dist/code/repository/tables/query.js +0 -50
- package/dist/code/repository/tables/teams.js +0 -48
- package/dist/code/repository/tables/userSettings.js +0 -39
- package/dist/code/repository/tables/users.js +0 -42
- package/dist/code/repository/tables/workbook.js +0 -43
- package/dist/code/services/connectorManager/index.js +0 -38
- package/dist/code/services/connectorManager/types.js +0 -2
- package/dist/code/services/files/index.js +0 -44
- package/dist/code/services/mysqlConnector/index.js +0 -180
- package/dist/code/services/oauthClient/oauth2Client.js +0 -10
- package/dist/code/services/openai/index.js +0 -20
- package/dist/code/services/openai/types.js +0 -2
- package/dist/code/services/pgConnector/index.js +0 -220
- package/dist/code/services/userSqlPromptRunner/index.js +0 -207
- package/dist/code/types/connectors.js +0 -2
- package/dist/code/utils/createRouter.js +0 -10
- package/dist/code/utils/httpError.js +0 -13
- package/dist/code/utils/prompts.js +0 -11
- package/dist/code/utils/queryUtils.js +0 -18
- package/dist/code/utils/rawSql.js +0 -32
- package/dist/code/utils/request.js +0 -35
- package/dist/code/utils/token.js +0 -8

package/dist/code/services/files/index.js
@@ -1,44 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.setupProjectFolders = exports.deleteFile = exports.storeFile = exports.getFile = void 0;
-const promises_1 = __importDefault(require("node:fs/promises"));
-const node_path_1 = require("node:path");
-const node_os_1 = __importDefault(require("node:os"));
-const homeDir = node_os_1.default.homedir();
-const filesPath = (0, node_path_1.join)(homeDir, ".dataramen", ".runtime", "files");
-const getPath = (path) => {
-    return (0, node_path_1.join)(filesPath, path);
-};
-const getFile = async (path) => {
-    return promises_1.default.readFile(getPath(path), { encoding: "utf8" });
-};
-exports.getFile = getFile;
-const storeFile = async (path, content) => {
-    return promises_1.default.writeFile(getPath(path), content, { encoding: "utf8" });
-};
-exports.storeFile = storeFile;
-const deleteFile = async (path) => {
-    return promises_1.default.unlink(getPath(path));
-};
-exports.deleteFile = deleteFile;
-const setupProjectFolders = async () => {
-    const hasFilesFolder = await filesFolderExists();
-    if (!hasFilesFolder) {
-        await promises_1.default.mkdir(filesPath, {
-            recursive: true
-        });
-    }
-};
-exports.setupProjectFolders = setupProjectFolders;
-async function filesFolderExists() {
-    try {
-        const result = await promises_1.default.lstat(filesPath);
-        return result.isDirectory();
-    }
-    catch (e) {
-        return false;
-    }
-}
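
For context, the removed file helpers read and wrote plain-text files under ~/.dataramen/.runtime/files in the user's home directory. A minimal usage sketch against the old module follows; the require path is an assumption based on the dist layout listed above.

    // Hypothetical consumer of the removed helpers; the require path is assumed.
    const { setupProjectFolders, storeFile, getFile, deleteFile } = require("./dist/code/services/files");

    async function demo() {
        await setupProjectFolders();              // creates ~/.dataramen/.runtime/files if missing
        await storeFile("notes.txt", "hello");    // writes ~/.dataramen/.runtime/files/notes.txt
        console.log(await getFile("notes.txt"));  // "hello"
        await deleteFile("notes.txt");
    }

    demo().catch(console.error);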

package/dist/code/services/mysqlConnector/index.js
@@ -1,180 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.MySqlConnector = void 0;
-const promise_1 = __importDefault(require("mysql2/promise"));
-const httpError_1 = require("../../utils/httpError");
-const getConnection = ({ dbDatabase, dbPassword, dbUser, dbUrl }) => {
-    return promise_1.default.createConnection({
-        host: dbUrl,
-        user: dbUser,
-        database: dbDatabase,
-        password: dbPassword,
-        // TODO: timeout?
-    });
-};
-const extractPrimaryKeys = async (connection) => {
-    const query = `
-        SELECT TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION
-        FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
-        WHERE CONSTRAINT_NAME = 'PRIMARY'
-        ORDER BY TABLE_NAME, ORDINAL_POSITION;
-    `;
-    const [rows] = await connection.execute(query);
-    const primaryKeysMap = {};
-    rows.forEach(row => {
-        const tableName = row.TABLE_NAME;
-        const columnName = row.COLUMN_NAME;
-        if (!primaryKeysMap[tableName]) {
-            primaryKeysMap[tableName] = [];
-        }
-        primaryKeysMap[tableName].push(columnName);
-    });
-    return primaryKeysMap;
-};
-const getReferences = async (connection) => {
-    const query = `
-        SELECT
-            TABLE_NAME AS table_name,
-            COLUMN_NAME AS field,
-            REFERENCED_TABLE_NAME AS referenced_table,
-            REFERENCED_COLUMN_NAME AS referenced_field
-        FROM
-            information_schema.KEY_COLUMN_USAGE
-        WHERE
-            REFERENCED_TABLE_NAME IS NOT NULL
-            AND CONSTRAINT_SCHEMA = DATABASE();
-    `;
-    const [rows] = await connection.execute(query);
-    const result = {};
-    if (Array.isArray(rows)) {
-        rows.forEach((row) => {
-            if (!result[row.table_name]) {
-                result[row.table_name] = {};
-            }
-            result[row.table_name][row.field] = {
-                refTable: row.referenced_table,
-                refField: row.referenced_field
-            };
-        });
-    }
-    return result;
-};
-const inspectSchema = async (dataSource, connection) => {
-    const result = await connection.query('SHOW TABLES');
-    const tables = result[0];
-    const refs = await getReferences(connection);
-    const primaryKeys = await extractPrimaryKeys(connection);
-    const rows = tables.map(async (table) => {
-        const tableName = Object.values(table)[0];
-        const inspectColumnsQuery = `select COLUMN_NAME, DATA_TYPE from information_schema.columns where table_schema = '${dataSource.dbDatabase}' and table_name = '${tableName}'`;
-        const [columns] = await connection.query(inspectColumnsQuery);
-        const ref = refs[tableName];
-        return {
-            columns: columns
-                .map((column) => ({
-                    name: column.COLUMN_NAME,
-                    type: column.DATA_TYPE,
-                    isPrimary: primaryKeys[tableName]?.includes(column.COLUMN_NAME),
-                    ref: ref?.[column.COLUMN_NAME] ? {
-                        table: ref[column.COLUMN_NAME].refTable,
-                        field: ref[column.COLUMN_NAME].refField,
-                    } : undefined,
-                }))
-                .sort((col1, col2) => {
-                    if (col1.isPrimary && col2.isPrimary) {
-                        return col1.name.localeCompare(col2.name);
-                    }
-                    return col1.isPrimary ? -1 : 1;
-                }),
-            createdAt: new Date(),
-            tableName,
-            updatedAt: new Date(),
-        };
-    });
-    return Promise.all(rows);
-};
-const executeQuery = async (query, connection, opts) => {
-    try {
-        console.log(`[MYSQL CONN] Query: ${query}`);
-        const [result, columns] = await connection.query({
-            sql: query,
-            rowsAsArray: true,
-        });
-        const responseType = result?.constructor?.name;
-        if (responseType === "ResultSetHeader") {
-            // UPDATE, INSERT, DELETE
-            const resultSet = result;
-            if (resultSet.affectedRows > 3 && opts.allowBulkUpdate !== true) {
-                throw new Error(`[MYSQL CONN] Bulk update performed without permission.`);
-            }
-            return {
-                columns: [{ column: "affectedRows", alias: "Affected rows", full: "affectedRows" }],
-                rows: [[resultSet.affectedRows]],
-                query,
-            };
-        }
-        else if (responseType === "Array") {
-            const rows = result; // todo: type
-            return {
-                columns: columns?.map((column) => ({
-                    column: column.orgName || column.name,
-                    table: column.orgTable,
-                    alias: column.name,
-                    full: column.orgTable ? column.orgTable + "." + column.orgName : column.name,
-                })) || [],
-                rows,
-                query,
-            };
-        }
-        throw new Error(`[MYSQL CONN] Unknown result type: ${responseType}`);
-    }
-    catch (e) {
-        console.error(e);
-        if (e instanceof httpError_1.HttpError) {
-            throw e;
-        }
-        throw new httpError_1.HttpError(400, e.message);
-    }
-};
-const withTransaction = async (connection, fn) => {
-    await connection.beginTransaction();
-    try {
-        const result = await fn();
-        await connection.commit();
-        console.log("[MYSQL CONN] Commit");
-        return result;
-    }
-    catch (e) {
-        await connection.rollback();
-        console.warn(e.message);
-        console.log("[MYSQL CONN] Rollback");
-        throw e;
-    }
-};
-const MySqlConnector = async (dataSource) => {
-    const connection = await getConnection(dataSource);
-    let _isClosed = false;
-    return {
-        dbType: 'mysql',
-        dataSource,
-        inspectSchema: () => inspectSchema(dataSource, connection),
-        executeQuery: (query, opts) => {
-            if (opts.type === "SELECT") {
-                return executeQuery(query, connection, opts);
-            }
-            return withTransaction(connection, () => executeQuery(query, connection, opts));
-        },
-        checkConnection: async () => connection.ping(),
-        isClosed: () => _isClosed,
-        close: async () => {
-            if (_isClosed)
-                return;
-            _isClosed = true;
-            return connection.destroy();
-        },
-    };
-};
-exports.MySqlConnector = MySqlConnector;
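
The removed MySqlConnector factory wrapped a single mysql2/promise connection and exposed inspectSchema, executeQuery, checkConnection, isClosed, and close; non-SELECT statements ran inside a transaction and were rejected when they affected more than 3 rows without allowBulkUpdate. A usage sketch against that surface, with placeholder connection values and an assumed require path:

    const { MySqlConnector } = require("./dist/code/services/mysqlConnector");

    async function demo() {
        // Placeholder credentials; in the package these came from a stored data source.
        const connector = await MySqlConnector({
            dbUrl: "localhost",
            dbUser: "root",
            dbPassword: "secret",
            dbDatabase: "app",
        });
        await connector.checkConnection(); // delegates to connection.ping()
        const { columns, rows } = await connector.executeQuery("SELECT id, email FROM users", {
            type: "SELECT",          // anything other than SELECT is wrapped in a transaction
            allowBulkUpdate: false,  // writes touching more than 3 rows are rejected
        });
        console.log(columns.map((c) => c.full), rows);
        await connector.close(); // destroys the underlying connection
    }

    demo().catch(console.error);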

package/dist/code/services/oauthClient/oauth2Client.js
@@ -1,10 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getTokenInfoFromRequest = void 0;
-const getTokenInfoFromRequest = async (request) => {
-    return {
-        email: "local@localhost",
-        sub: "local",
-    };
-};
-exports.getTokenInfoFromRequest = getTokenInfoFromRequest;

package/dist/code/services/openai/index.js
@@ -1,20 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.executePrompt = void 0;
-const URL = "https://api.openai.com/v1/chat/completions";
-const executePrompt = async ({ openaiApiKey, messages, model, temperature }) => {
-    const request = await fetch(URL, {
-        method: "POST",
-        headers: {
-            "Content-Type": "application/json",
-            Authorization: `Bearer ${openaiApiKey}`,
-        },
-        body: JSON.stringify({
-            messages,
-            model,
-            temperature,
-        }),
-    });
-    return await request.json();
-};
-exports.executePrompt = executePrompt;
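
The removed executePrompt helper was a thin fetch wrapper over the OpenAI chat-completions endpoint and returned the parsed JSON body unchanged. A minimal call sketch; the require path, API key source, and model name are placeholders:

    const { executePrompt } = require("./dist/code/services/openai");

    async function demo() {
        const response = await executePrompt({
            openaiApiKey: process.env.OPENAI_API_KEY, // placeholder; the package read it from user settings
            model: "gpt-4o-mini",                     // placeholder model name
            temperature: 0.5,
            messages: [
                { role: "system", content: "Answer with plain SQL only." },
                { role: "user", content: "Count the rows in the users table." },
            ],
        });
        // The raw chat-completions payload is returned as-is.
        console.log(response.choices?.[0]?.message?.content);
    }

    demo().catch(console.error);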

package/dist/code/services/pgConnector/index.js
@@ -1,220 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PGSqlConnector = void 0;
-const pg_1 = __importDefault(require("pg"));
-const httpError_1 = require("../../utils/httpError");
-const getConnection = async ({ dbDatabase, dbPassword, dbUser, dbUrl, dbPort }) => {
-    const client = new pg_1.default.Client({
-        host: dbUrl,
-        user: dbUser,
-        database: dbDatabase,
-        password: dbPassword,
-        port: dbPort,
-        query_timeout: 10_000, // 10 seconds
-    });
-    await client.connect();
-    return client;
-};
-const extractPrimaryKeys = async (client) => {
-    const query = `
-        SELECT
-            kcu.table_name,
-            kcu.column_name,
-            kcu.ordinal_position
-        FROM
-            information_schema.table_constraints tc
-        JOIN information_schema.key_column_usage kcu
-            ON tc.constraint_name = kcu.constraint_name
-            AND tc.table_schema = kcu.table_schema
-        WHERE
-            tc.constraint_type = 'PRIMARY KEY'
-        ORDER BY
-            kcu.table_name, kcu.ordinal_position;
-    `;
-    const result = await client.query(query);
-    const primaryKeysMap = {};
-    result.rows.forEach(row => {
-        const tableName = row.table_name;
-        const columnName = row.column_name;
-        if (!primaryKeysMap[tableName]) {
-            primaryKeysMap[tableName] = [];
-        }
-        primaryKeysMap[tableName].push(columnName);
-    });
-    return primaryKeysMap;
-};
-const getReferences = async (connection) => {
-    const query = `
-        SELECT
-            tc.table_name AS table_name,
-            kcu.column_name AS field,
-            ccu.table_name AS referenced_table,
-            ccu.column_name AS referenced_field
-        FROM
-            information_schema.table_constraints AS tc
-        JOIN information_schema.key_column_usage AS kcu
-            ON tc.constraint_name = kcu.constraint_name
-            AND tc.table_schema = kcu.table_schema
-        JOIN information_schema.constraint_column_usage AS ccu
-            ON ccu.constraint_name = tc.constraint_name
-            AND ccu.table_schema = tc.table_schema
-        WHERE tc.constraint_type = 'FOREIGN KEY';
-    `;
-    const res = await connection.query(query);
-    const result = {};
-    res.rows.forEach(row => {
-        if (!result[row.table_name]) {
-            result[row.table_name] = {};
-        }
-        result[row.table_name][row.field] = {
-            refTable: row.referenced_table,
-            refField: row.referenced_field
-        };
-    });
-    return result;
-};
-const inspectSchema = async (dataSource, connection) => {
-    const tableQuery = `SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname = '${dataSource.dbSchema}'`;
-    const result = await connection.query(tableQuery);
-    const tables = result.rows;
-    const refs = await getReferences(connection);
-    const primaryKeys = await extractPrimaryKeys(connection);
-    const rows = tables.map(async (table) => {
-        const tableName = Object.values(table)[0];
-        const pgQuery = `
-            SELECT column_name, data_type
-            FROM information_schema.columns
-            WHERE
-                table_name = '${tableName}' and
-                table_schema = '${dataSource.dbSchema}'
-        `;
-        const { rows } = await connection.query(pgQuery);
-        const ref = refs[tableName];
-        return {
-            columns: rows
-                .map((column) => ({
-                    name: column.column_name,
-                    type: column.data_type,
-                    isPrimary: primaryKeys[tableName]?.includes(column.column_name),
-                    ref: ref?.[column.column_name] ? {
-                        table: ref[column.column_name].refTable,
-                        field: ref[column.column_name].refField,
-                    } : undefined,
-                }))
-                .sort((col1, col2) => {
-                    if (col1.isPrimary && col2.isPrimary) {
-                        return col1.name.localeCompare(col2.name);
-                    }
-                    return col1.isPrimary ? -1 : 1;
-                }),
-            createdAt: new Date(),
-            tableName,
-            updatedAt: new Date(),
-        };
-    });
-    return Promise.all(rows);
-};
-const extractTableNames = async (columnIds, connection) => {
-    const query = `select relname, attname, concat(pg_class.oid, '-', attnum) as row_key
-        from pg_attribute
-        left join pg_class on pg_attribute.attrelid = pg_class.oid
-        where
-            concat(pg_class.oid, '-', attnum) IN (${columnIds.join(", ")})
-        limit 25;`;
-    const result = await connection.query(query);
-    return result.rows.reduce((acc, row) => {
-        acc[row.row_key] = { table: row.relname, column: row.attname };
-        return acc;
-    }, {});
-};
-const executeQuery = async (query, connection, opts) => {
-    try {
-        console.log(`[PG CONN] Query: ${query}`);
-        const { rows, fields, command, rowCount } = await connection.query({
-            text: query,
-            rowMode: "array",
-        });
-        if (command === "UPDATE" || command === "INSERT" || command === "DELETE") {
-            if (rowCount != null && rowCount > 3 && opts.allowBulkUpdate !== true) {
-                throw new Error(`[PG CONN] Bulk update performed without permission.`);
-            }
-            return {
-                columns: [{ column: "affectedRows", alias: "Affected rows", full: "affectedRows" }],
-                rows: [[rowCount]],
-                query,
-            };
-        }
-        if (command === "SELECT") {
-            const cols = fields.map((column) => `'${column.tableID}-${column.columnID}'`);
-            const headerOG = await extractTableNames(cols, connection);
-            return {
-                columns: fields.map((column) => {
-                    const ogCol = headerOG[`${column.tableID}-${column.columnID}`];
-                    return {
-                        column: ogCol?.column || column.name,
-                        alias: column.name,
-                        table: ogCol?.table || '',
-                        full: ogCol ? ogCol.table + "." + ogCol.column : column.name,
-                    };
-                }),
-                rows: rows,
-                query,
-            };
-        }
-        throw new Error(`[PG CONN] Unsupported command: ${command}`);
-    }
-    catch (e) {
-        if (e instanceof httpError_1.HttpError) {
-            throw e;
-        }
-        throw new httpError_1.HttpError(400, e.message);
-    }
-};
-const withTransaction = async (client, fn) => {
-    await client.query("BEGIN");
-    try {
-        const result = await fn();
-        await client.query("COMMIT");
-        console.log(`[PG CONN] Commit`);
-        return result;
-    }
-    catch (e) {
-        await client.query("ROLLBACK");
-        console.log(`[PG CONN] Rollback`);
-        throw e;
-    }
-};
-const PGSqlConnector = async (dataSource) => {
-    const client = await getConnection(dataSource);
-    let _isClosed = false;
-    let isPathSet = false;
-    const withPathSet = async (fn) => {
-        if (!isPathSet) {
-            await client.query(`SET search_path TO ${dataSource.dbSchema}`);
-        }
-        return fn();
-    };
-    return {
-        dbType: 'postgres',
-        dataSource,
-        inspectSchema: () => inspectSchema(dataSource, client),
-        executeQuery: (query, opts) => withPathSet(() => {
-            if (opts.type === "SELECT") {
-                return executeQuery(query, client, opts);
-            }
-            return withTransaction(client, () => executeQuery(query, client, opts));
-        }),
-        checkConnection: async () => { },
-        isClosed: () => _isClosed,
-        close: async () => {
-            if (_isClosed)
-                return;
-            _isClosed = true;
-            return client.end();
-        },
-    };
-};
-exports.PGSqlConnector = PGSqlConnector;
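
Both removed connectors returned the same shape from executeQuery: a columns array of { column, alias, table, full } descriptors plus rows as arrays of values (rowsAsArray for MySQL, rowMode: "array" for Postgres), so callers could render results without caring which engine produced them. A sketch of result handling written against that shared shape; constructing the connector is omitted and the function name is illustrative:

    // `connector` is assumed to be a MySqlConnector or PGSqlConnector instance from the removed code.
    async function printResult(connector, sql) {
        const result = await connector.executeQuery(sql, { type: "SELECT", allowBulkUpdate: false });
        // Header built from the fully qualified names ("table.column" when the origin table is known).
        console.log(result.columns.map((c) => c.full).join(" | "));
        for (const row of result.rows) {
            console.log(row.join(" | ")); // each row is an array of values, not an object
        }
        console.log(`-- query: ${result.query}`);
    }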

package/dist/code/services/userSqlPromptRunner/index.js
@@ -1,207 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.runRawSqlQuery = exports.runUserSqlQuery = exports.runUserSqlPrompt = exports.generateSqlFromPrompt = void 0;
-const httpError_1 = require("../../utils/httpError");
-const openai_1 = require("../openai");
-const connectorManager_1 = require("../connectorManager");
-const prompts_1 = require("../../utils/prompts");
-const db_1 = require("../../repository/db");
-const sql_builder_1 = require("sql-builder");
-const typeorm_1 = require("typeorm");
-const rawSql_1 = require("../../utils/rawSql");
-const sanitizeQuery = (query) => {
-    // transform query from ```sql\nSELECT COUNT(*) FROM users;\n``` to SELECT COUNT(*) FROM users;
-    return query.replace(/```sql\n/g, "").replace(/\n```/g, "");
-};
-function getSize(size) {
-    return size || 20;
-}
-function getPage(page) {
-    return page || 0;
-}
-const generateSqlFromPrompt = async (datasourceId, userId, userPrompt) => {
-    const [dataSource, userSettings] = await Promise.all([
-        db_1.DataSourceRepository.findOne({
-            where: {
-                id: datasourceId,
-            },
-            relations: {
-                inspections: true,
-            }
-        }),
-        db_1.UserSettingsRepository.findOneBy({
-            user: {
-                id: userId,
-            }
-        })
-    ]);
-    if (!dataSource) {
-        throw new httpError_1.HttpError(404, "Data source not found");
-    }
-    const openAiToken = userSettings?.openAiToken;
-    if (!openAiToken) {
-        throw new httpError_1.HttpError(404, "API key not found. Please set API key in settings");
-    }
-    const model = userSettings?.model;
-    if (!model) {
-        throw new httpError_1.HttpError(404, "Model not found. Please set model in settings");
-    }
-    const dbString = (0, prompts_1.buildDbString)(dataSource.inspections);
-    const result = await (0, openai_1.executePrompt)({
-        model,
-        temperature: 0.5,
-        messages: [
-            {
-                role: "system",
-                content: `Act as a ${dataSource.dbType} query writer. You can only answer with plain query text. Do not write explanations. Always limit queries to max 100 rows. Here is db schema:\n${dbString}`,
-            },
-            {
-                role: "user",
-                content: userPrompt,
-            },
-        ],
-        openaiApiKey: openAiToken,
-    });
-    const query = sanitizeQuery(result.choices[0].message.content);
-    return {
-        sql: query,
-        dataSource,
-    };
-};
-exports.generateSqlFromPrompt = generateSqlFromPrompt;
-const runUserSqlPrompt = async (req, datasourceId, userId, userPrompt) => {
-    const { dataSource, sql } = await (0, exports.generateSqlFromPrompt)(datasourceId, userId, userPrompt);
-    const dbConnectionManager = await (0, connectorManager_1.getDynamicConnection)(dataSource, req);
-    const queryBuilder = new sql_builder_1.SQLManipulator(dataSource.dbType, sql);
-    queryBuilder
-        .setLimit(20);
-    return dbConnectionManager.executeQuery(queryBuilder.toExecutableSQL(), {
-        type: queryBuilder.getParsed().type,
-        allowBulkUpdate: false,
-    });
-};
-exports.runUserSqlPrompt = runUserSqlPrompt;
-function handleAlias(value) {
-    if (value.includes(" ") && !value.startsWith("'")) {
-        return `\`${value}\``;
-    }
-    return value;
-}
-const runUserSqlQuery = async (req, { table, variables, datasourceId, filters, joins, orderBy, size, page, columns, groupBy }) => {
-    const dataSource = await db_1.DataSourceRepository.findOneBy({
-        id: datasourceId,
-    });
-    const tables = [];
-    const allColumns = [];
-    if (!dataSource) {
-        throw new httpError_1.HttpError(404, "Data source not found");
-    }
-    const queryBuilder = new sql_builder_1.SQLManipulator(dataSource.dbType, "SELECT");
-    queryBuilder.setTable(table);
-    if (variables) {
-        queryBuilder.setParameters(variables);
-    }
-    if (queryBuilder.getParsed().type !== "SELECT") {
-        throw new httpError_1.HttpError(400, "Only SELECT queries are allowed in this endpoint");
-    }
-    queryBuilder.setLimit(size || 20);
-    queryBuilder.setOffset(size * page);
-    filters?.forEach((w) => {
-        queryBuilder.addWhere(processWhereCondition(w));
-    });
-    if (joins) {
-        queryBuilder.addJoin(...joins);
-    }
-    if (orderBy) {
-        queryBuilder.addOrderBy(...orderBy.map((o) => ({
-            ...o,
-            column: handleAlias(o.column),
-        })));
-    }
-    if (columns && columns.length > 0) {
-        queryBuilder.selectColumns(columns);
-    }
-    if (groupBy && groupBy.length > 0) {
-        groupBy.forEach((g) => queryBuilder.addGroupBy(g));
-    }
-    const parsed = queryBuilder.getParsed();
-    if (parsed.table) {
-        tables.push(parsed.table);
-    }
-    if (parsed.joins && parsed.joins.length > 0) {
-        parsed.joins.forEach((join) => {
-            tables.push(join.table);
-        });
-    }
-    const info = await db_1.DatabaseInspectionRepository.find({
-        where: {
-            tableName: (0, typeorm_1.In)(tables),
-            datasource: {
-                id: datasourceId,
-            },
-        },
-    });
-    for (const table of info) {
-        if (!table.columns)
-            continue;
-        for (const column of table.columns) {
-            allColumns.push({ column: column.name, table: table.tableName || '' });
-        }
-    }
-    const dbConnectionManager = await (0, connectorManager_1.getDynamicConnection)(dataSource, req);
-    const result = await dbConnectionManager.executeQuery(queryBuilder.toExecutableSQL(), {
-        type: queryBuilder.getParsed().type,
-        allowBulkUpdate: false,
-    });
-    return {
-        ...result,
-        tables,
-        allColumns,
-    };
-};
-exports.runUserSqlQuery = runUserSqlQuery;
-const runRawSqlQuery = async (req, { sql, variables, datasourceId, size, page }) => {
-    const dataSource = await db_1.DataSourceRepository.findOneBy({
-        id: datasourceId,
-    });
-    if (!dataSource) {
-        throw new httpError_1.HttpError(404, "Data source not found");
-    }
-    const queryType = (0, rawSql_1.detectQueryType)(sql);
-    let query = sql;
-    if (queryType === "SELECT") {
-        if (query.match(/LIMIT|OFFSET/ig)) {
-            throw new httpError_1.HttpError(400, "Queries are automatically paginated, do not use LIMIT and/or OFFSET");
-        }
-        const finalSize = getSize(size);
-        query = (0, rawSql_1.applyRawPagination)(query, finalSize, finalSize * getPage(page));
-    }
-    if (variables) {
-        query = (0, rawSql_1.replaceSqlVariables)(query, variables);
-    }
-    const dbConnectionManager = await (0, connectorManager_1.getDynamicConnection)(dataSource, req);
-    return dbConnectionManager.executeQuery(query, {
-        type: queryType,
-        allowBulkUpdate: false,
-    });
-};
-exports.runRawSqlQuery = runRawSqlQuery;
-function processWhereCondition(where) {
-    const w = { ...where };
-    switch (w.operator) {
-        case "IS NULL":
-        case "IS NOT NULL":
-            w.value = undefined;
-            break;
-        case "LIKE":
-        case "ILIKE":
-        case "NOT LIKE":
-        case "NOT ILIKE":
-            w.value = w.value?.map((v) => ({
-                isColumn: v?.isColumn,
-                value: `%${v?.value}%`
-            }));
-            break;
-    }
-    return w;
-}
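
Before executing a model-generated statement, the removed prompt runner only stripped the ```sql fences the model tends to wrap around its answer. A standalone restatement of that sanitization step (the helper itself was not exported):

    // Restatement of the removed, non-exported sanitizeQuery helper.
    const sanitizeQuery = (query) =>
        query.replace(/```sql\n/g, "").replace(/\n```/g, "");

    const raw = "```sql\nSELECT COUNT(*) FROM users;\n```";
    console.log(sanitizeQuery(raw)); // SELECT COUNT(*) FROM users;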