@dataramen/cli 0.0.8 → 0.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +67 -0
- package/dist/README.md +67 -0
- package/dist/code/api/chat/router.js +55 -0
- package/dist/code/api/dataSources/router.js +147 -0
- package/dist/code/api/dataSources/types.js +2 -0
- package/dist/code/api/dataSources/validators.js +22 -0
- package/dist/code/api/project/router.js +100 -0
- package/dist/code/api/queries/router.js +122 -0
- package/dist/code/api/runner/router.js +22 -0
- package/dist/code/api/status/router.js +17 -0
- package/dist/code/api/teams/router.js +35 -0
- package/dist/code/api/userSettings/router.js +54 -0
- package/dist/code/api/users/router.js +91 -0
- package/dist/code/api/workbooks/router.js +123 -0
- package/dist/code/api/workbooks/types.js +2 -0
- package/dist/code/env.js +25 -0
- package/dist/code/index.js +86 -0
- package/dist/code/proxy.js +8 -8
- package/dist/code/repository/db.js +58 -0
- package/dist/code/repository/tables/databaseInspection.js +40 -0
- package/dist/code/repository/tables/datasource.js +86 -0
- package/dist/code/repository/tables/query.js +50 -0
- package/dist/code/repository/tables/teams.js +48 -0
- package/dist/code/repository/tables/userSettings.js +39 -0
- package/dist/code/repository/tables/users.js +42 -0
- package/dist/code/repository/tables/workbook.js +43 -0
- package/dist/code/services/connectorManager/index.js +38 -0
- package/dist/code/services/connectorManager/types.js +2 -0
- package/dist/code/services/files/index.js +44 -0
- package/dist/code/services/mysqlConnector/index.js +180 -0
- package/dist/code/services/oauthClient/oauth2Client.js +10 -0
- package/dist/code/services/openai/index.js +20 -0
- package/dist/code/services/openai/types.js +2 -0
- package/dist/code/services/pgConnector/index.js +220 -0
- package/dist/code/services/userSqlPromptRunner/index.js +207 -0
- package/dist/code/types/connectors.js +2 -0
- package/dist/code/utils/createRouter.js +10 -0
- package/dist/code/utils/httpError.js +13 -0
- package/dist/code/utils/prompts.js +11 -0
- package/dist/code/utils/queryUtils.js +18 -0
- package/dist/code/utils/rawSql.js +32 -0
- package/dist/code/utils/request.js +35 -0
- package/dist/code/utils/token.js +8 -0
- package/dist/package.json +1 -1
- package/package.json +21 -1

package/dist/code/services/openai/index.js
ADDED
@@ -0,0 +1,20 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.executePrompt = void 0;
+const URL = "https://api.openai.com/v1/chat/completions";
+const executePrompt = async ({ openaiApiKey, messages, model, temperature }) => {
+    const request = await fetch(URL, {
+        method: "POST",
+        headers: {
+            "Content-Type": "application/json",
+            Authorization: `Bearer ${openaiApiKey}`,
+        },
+        body: JSON.stringify({
+            messages,
+            model,
+            temperature,
+        }),
+    });
+    return await request.json();
+};
+exports.executePrompt = executePrompt;
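
For context, `executePrompt` is a thin wrapper over the OpenAI chat-completions HTTP endpoint that returns the parsed JSON body. A minimal usage sketch; the require path, model name, and environment variable are illustrative assumptions, not part of the diff:

const { executePrompt } = require("./dist/code/services/openai");

async function main() {
    const response = await executePrompt({
        openaiApiKey: process.env.OPENAI_API_KEY, // assumed env var, not defined by the package
        model: "gpt-4o-mini",                     // illustrative model name
        temperature: 0.5,
        messages: [
            { role: "system", content: "Answer with plain SQL only." },
            { role: "user", content: "Count the rows in users." },
        ],
    });
    // The wrapper returns the raw completions response body.
    console.log(response.choices?.[0]?.message?.content);
}

main().catch(console.error);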

package/dist/code/services/pgConnector/index.js
ADDED
@@ -0,0 +1,220 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PGSqlConnector = void 0;
+const pg_1 = __importDefault(require("pg"));
+const httpError_1 = require("../../utils/httpError");
+const getConnection = async ({ dbDatabase, dbPassword, dbUser, dbUrl, dbPort }) => {
+    const client = new pg_1.default.Client({
+        host: dbUrl,
+        user: dbUser,
+        database: dbDatabase,
+        password: dbPassword,
+        port: dbPort,
+        query_timeout: 10_000, // 10 seconds
+    });
+    await client.connect();
+    return client;
+};
+const extractPrimaryKeys = async (client) => {
+    const query = `
+        SELECT
+            kcu.table_name,
+            kcu.column_name,
+            kcu.ordinal_position
+        FROM
+            information_schema.table_constraints tc
+        JOIN information_schema.key_column_usage kcu
+            ON tc.constraint_name = kcu.constraint_name
+            AND tc.table_schema = kcu.table_schema
+        WHERE
+            tc.constraint_type = 'PRIMARY KEY'
+        ORDER BY
+            kcu.table_name, kcu.ordinal_position;
+    `;
+    const result = await client.query(query);
+    const primaryKeysMap = {};
+    result.rows.forEach(row => {
+        const tableName = row.table_name;
+        const columnName = row.column_name;
+        if (!primaryKeysMap[tableName]) {
+            primaryKeysMap[tableName] = [];
+        }
+        primaryKeysMap[tableName].push(columnName);
+    });
+    return primaryKeysMap;
+};
+const getReferences = async (connection) => {
+    const query = `
+        SELECT
+            tc.table_name AS table_name,
+            kcu.column_name AS field,
+            ccu.table_name AS referenced_table,
+            ccu.column_name AS referenced_field
+        FROM
+            information_schema.table_constraints AS tc
+        JOIN information_schema.key_column_usage AS kcu
+            ON tc.constraint_name = kcu.constraint_name
+            AND tc.table_schema = kcu.table_schema
+        JOIN information_schema.constraint_column_usage AS ccu
+            ON ccu.constraint_name = tc.constraint_name
+            AND ccu.table_schema = tc.table_schema
+        WHERE tc.constraint_type = 'FOREIGN KEY';
+    `;
+    const res = await connection.query(query);
+    const result = {};
+    res.rows.forEach(row => {
+        if (!result[row.table_name]) {
+            result[row.table_name] = {};
+        }
+        result[row.table_name][row.field] = {
+            refTable: row.referenced_table,
+            refField: row.referenced_field
+        };
+    });
+    return result;
+};
+const inspectSchema = async (dataSource, connection) => {
+    const tableQuery = `SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname = '${dataSource.dbSchema}'`;
+    const result = await connection.query(tableQuery);
+    const tables = result.rows;
+    const refs = await getReferences(connection);
+    const primaryKeys = await extractPrimaryKeys(connection);
+    const rows = tables.map(async (table) => {
+        const tableName = Object.values(table)[0];
+        const pgQuery = `
+            SELECT column_name, data_type
+            FROM information_schema.columns
+            WHERE
+                table_name = '${tableName}' and
+                table_schema = '${dataSource.dbSchema}'
+        `;
+        const { rows } = await connection.query(pgQuery);
+        const ref = refs[tableName];
+        return {
+            columns: rows
+                .map((column) => ({
+                    name: column.column_name,
+                    type: column.data_type,
+                    isPrimary: primaryKeys[tableName]?.includes(column.column_name),
+                    ref: ref?.[column.column_name] ? {
+                        table: ref[column.column_name].refTable,
+                        field: ref[column.column_name].refField,
+                    } : undefined,
+                }))
+                .sort((col1, col2) => {
+                    if (col1.isPrimary && col2.isPrimary) {
+                        return col1.name.localeCompare(col2.name);
+                    }
+                    return col1.isPrimary ? -1 : 1;
+                }),
+            createdAt: new Date(),
+            tableName,
+            updatedAt: new Date(),
+        };
+    });
+    return Promise.all(rows);
+};
+const extractTableNames = async (columnIds, connection) => {
+    const query = `select relname, attname, concat(pg_class.oid, '-', attnum) as row_key
+        from pg_attribute
+        left join pg_class on pg_attribute.attrelid = pg_class.oid
+        where
+            concat(pg_class.oid, '-', attnum) IN (${columnIds.join(", ")})
+        limit 25;`;
+    const result = await connection.query(query);
+    return result.rows.reduce((acc, row) => {
+        acc[row.row_key] = { table: row.relname, column: row.attname };
+        return acc;
+    }, {});
+};
+const executeQuery = async (query, connection, opts) => {
+    try {
+        console.log(`[PG CONN] Query: ${query}`);
+        const { rows, fields, command, rowCount } = await connection.query({
+            text: query,
+            rowMode: "array",
+        });
+        if (command === "UPDATE" || command === "INSERT" || command === "DELETE") {
+            if (rowCount != null && rowCount > 3 && opts.allowBulkUpdate !== true) {
+                throw new Error(`[PG CONN] Bulk update performed without permission.`);
+            }
+            return {
+                columns: [{ column: "affectedRows", alias: "Affected rows", full: "affectedRows" }],
+                rows: [[rowCount]],
+                query,
+            };
+        }
+        if (command === "SELECT") {
+            const cols = fields.map((column) => `'${column.tableID}-${column.columnID}'`);
+            const headerOG = await extractTableNames(cols, connection);
+            return {
+                columns: fields.map((column) => {
+                    const ogCol = headerOG[`${column.tableID}-${column.columnID}`];
+                    return {
+                        column: ogCol?.column || column.name,
+                        alias: column.name,
+                        table: ogCol?.table || '',
+                        full: ogCol ? ogCol.table + "." + ogCol.column : column.name,
+                    };
+                }),
+                rows: rows,
+                query,
+            };
+        }
+        throw new Error(`[PG CONN] Unsupported command: ${command}`);
+    }
+    catch (e) {
+        if (e instanceof httpError_1.HttpError) {
+            throw e;
+        }
+        throw new httpError_1.HttpError(400, e.message);
+    }
+};
+const withTransaction = async (client, fn) => {
+    await client.query("BEGIN");
+    try {
+        const result = await fn();
+        await client.query("COMMIT");
+        console.log(`[PG CONN] Commit`);
+        return result;
+    }
+    catch (e) {
+        await client.query("ROLLBACK");
+        console.log(`[PG CONN] Rollback`);
+        throw e;
+    }
+};
+const PGSqlConnector = async (dataSource) => {
+    const client = await getConnection(dataSource);
+    let _isClosed = false;
+    let isPathSet = false;
+    const withPathSet = async (fn) => {
+        if (!isPathSet) {
+            await client.query(`SET search_path TO ${dataSource.dbSchema}`);
+        }
+        return fn();
+    };
+    return {
+        dbType: 'postgres',
+        dataSource,
+        inspectSchema: () => inspectSchema(dataSource, client),
+        executeQuery: (query, opts) => withPathSet(() => {
+            if (opts.type === "SELECT") {
+                return executeQuery(query, client, opts);
+            }
+            return withTransaction(client, () => executeQuery(query, client, opts));
+        }),
+        checkConnection: async () => { },
+        isClosed: () => _isClosed,
+        close: async () => {
+            if (_isClosed)
+                return;
+            _isClosed = true;
+            return client.end();
+        },
+    };
+};
+exports.PGSqlConnector = PGSqlConnector;
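
A rough sketch of how the connector factory can be driven. The connection fields and require path below are placeholders; the `opts.type` flag mirrors the check inside `executeQuery`, and non-SELECT statements are wrapped in a transaction by the connector itself:

const { PGSqlConnector } = require("./dist/code/services/pgConnector");

async function demo() {
    // All connection details below are made-up placeholders.
    const connector = await PGSqlConnector({
        dbUrl: "localhost",
        dbPort: 5432,
        dbUser: "postgres",
        dbPassword: "postgres",
        dbDatabase: "app",
        dbSchema: "public",
    });
    const result = await connector.executeQuery("SELECT * FROM users", {
        type: "SELECT",
        allowBulkUpdate: false,
    });
    console.log(result.columns.map((c) => c.full), result.rows.length);
    await connector.close();
}

demo().catch(console.error);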

package/dist/code/services/userSqlPromptRunner/index.js
ADDED
@@ -0,0 +1,207 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.runRawSqlQuery = exports.runUserSqlQuery = exports.runUserSqlPrompt = exports.generateSqlFromPrompt = void 0;
+const httpError_1 = require("../../utils/httpError");
+const openai_1 = require("../openai");
+const connectorManager_1 = require("../connectorManager");
+const prompts_1 = require("../../utils/prompts");
+const db_1 = require("../../repository/db");
+const sql_builder_1 = require("sql-builder");
+const typeorm_1 = require("typeorm");
+const rawSql_1 = require("../../utils/rawSql");
+const sanitizeQuery = (query) => {
+    // transform query from ```sql\nSELECT COUNT(*) FROM users;\n``` to SELECT COUNT(*) FROM users;
+    return query.replace(/```sql\n/g, "").replace(/\n```/g, "");
+};
+function getSize(size) {
+    return size || 20;
+}
+function getPage(page) {
+    return page || 0;
+}
+const generateSqlFromPrompt = async (datasourceId, userId, userPrompt) => {
+    const [dataSource, userSettings] = await Promise.all([
+        db_1.DataSourceRepository.findOne({
+            where: {
+                id: datasourceId,
+            },
+            relations: {
+                inspections: true,
+            }
+        }),
+        db_1.UserSettingsRepository.findOneBy({
+            user: {
+                id: userId,
+            }
+        })
+    ]);
+    if (!dataSource) {
+        throw new httpError_1.HttpError(404, "Data source not found");
+    }
+    const openAiToken = userSettings?.openAiToken;
+    if (!openAiToken) {
+        throw new httpError_1.HttpError(404, "API key not found. Please set API key in settings");
+    }
+    const model = userSettings?.model;
+    if (!model) {
+        throw new httpError_1.HttpError(404, "Model not found. Please set model in settings");
+    }
+    const dbString = (0, prompts_1.buildDbString)(dataSource.inspections);
+    const result = await (0, openai_1.executePrompt)({
+        model,
+        temperature: 0.5,
+        messages: [
+            {
+                role: "system",
+                content: `Act as a ${dataSource.dbType} query writer. You can only answer with plain query text. Do not write explanations. Always limit queries to max 100 rows. Here is db schema:\n${dbString}`,
+            },
+            {
+                role: "user",
+                content: userPrompt,
+            },
+        ],
+        openaiApiKey: openAiToken,
+    });
+    const query = sanitizeQuery(result.choices[0].message.content);
+    return {
+        sql: query,
+        dataSource,
+    };
+};
+exports.generateSqlFromPrompt = generateSqlFromPrompt;
+const runUserSqlPrompt = async (req, datasourceId, userId, userPrompt) => {
+    const { dataSource, sql } = await (0, exports.generateSqlFromPrompt)(datasourceId, userId, userPrompt);
+    const dbConnectionManager = await (0, connectorManager_1.getDynamicConnection)(dataSource, req);
+    const queryBuilder = new sql_builder_1.SQLManipulator(dataSource.dbType, sql);
+    queryBuilder
+        .setLimit(20);
+    return dbConnectionManager.executeQuery(queryBuilder.toExecutableSQL(), {
+        type: queryBuilder.getParsed().type,
+        allowBulkUpdate: false,
+    });
+};
+exports.runUserSqlPrompt = runUserSqlPrompt;
+function handleAlias(value) {
+    if (value.includes(" ") && !value.startsWith("'")) {
+        return `\`${value}\``;
+    }
+    return value;
+}
+const runUserSqlQuery = async (req, { table, variables, datasourceId, filters, joins, orderBy, size, page, columns, groupBy }) => {
+    const dataSource = await db_1.DataSourceRepository.findOneBy({
+        id: datasourceId,
+    });
+    const tables = [];
+    const allColumns = [];
+    if (!dataSource) {
+        throw new httpError_1.HttpError(404, "Data source not found");
+    }
+    const queryBuilder = new sql_builder_1.SQLManipulator(dataSource.dbType, "SELECT");
+    queryBuilder.setTable(table);
+    if (variables) {
+        queryBuilder.setParameters(variables);
+    }
+    if (queryBuilder.getParsed().type !== "SELECT") {
+        throw new httpError_1.HttpError(400, "Only SELECT queries are allowed in this endpoint");
+    }
+    queryBuilder.setLimit(size || 20);
+    queryBuilder.setOffset(size * page);
+    filters?.forEach((w) => {
+        queryBuilder.addWhere(processWhereCondition(w));
+    });
+    if (joins) {
+        queryBuilder.addJoin(...joins);
+    }
+    if (orderBy) {
+        queryBuilder.addOrderBy(...orderBy.map((o) => ({
+            ...o,
+            column: handleAlias(o.column),
+        })));
+    }
+    if (columns && columns.length > 0) {
+        queryBuilder.selectColumns(columns);
+    }
+    if (groupBy && groupBy.length > 0) {
+        groupBy.forEach((g) => queryBuilder.addGroupBy(g));
+    }
+    const parsed = queryBuilder.getParsed();
+    if (parsed.table) {
+        tables.push(parsed.table);
+    }
+    if (parsed.joins && parsed.joins.length > 0) {
+        parsed.joins.forEach((join) => {
+            tables.push(join.table);
+        });
+    }
+    const info = await db_1.DatabaseInspectionRepository.find({
+        where: {
+            tableName: (0, typeorm_1.In)(tables),
+            datasource: {
+                id: datasourceId,
+            },
+        },
+    });
+    for (const table of info) {
+        if (!table.columns)
+            continue;
+        for (const column of table.columns) {
+            allColumns.push({ column: column.name, table: table.tableName || '' });
+        }
+    }
+    const dbConnectionManager = await (0, connectorManager_1.getDynamicConnection)(dataSource, req);
+    const result = await dbConnectionManager.executeQuery(queryBuilder.toExecutableSQL(), {
+        type: queryBuilder.getParsed().type,
+        allowBulkUpdate: false,
+    });
+    return {
+        ...result,
+        tables,
+        allColumns,
+    };
+};
+exports.runUserSqlQuery = runUserSqlQuery;
+const runRawSqlQuery = async (req, { sql, variables, datasourceId, size, page }) => {
+    const dataSource = await db_1.DataSourceRepository.findOneBy({
+        id: datasourceId,
+    });
+    if (!dataSource) {
+        throw new httpError_1.HttpError(404, "Data source not found");
+    }
+    const queryType = (0, rawSql_1.detectQueryType)(sql);
+    let query = sql;
+    if (queryType === "SELECT") {
+        if (query.match(/LIMIT|OFFSET/ig)) {
+            throw new httpError_1.HttpError(400, "Queries are automatically paginated, do not use LIMIT and/or OFFSET");
+        }
+        const finalSize = getSize(size);
+        query = (0, rawSql_1.applyRawPagination)(query, finalSize, finalSize * getPage(page));
+    }
+    if (variables) {
+        query = (0, rawSql_1.replaceSqlVariables)(query, variables);
+    }
+    const dbConnectionManager = await (0, connectorManager_1.getDynamicConnection)(dataSource, req);
+    return dbConnectionManager.executeQuery(query, {
+        type: queryType,
+        allowBulkUpdate: false,
+    });
+};
+exports.runRawSqlQuery = runRawSqlQuery;
+function processWhereCondition(where) {
+    const w = { ...where };
+    switch (w.operator) {
+        case "IS NULL":
+        case "IS NOT NULL":
+            w.value = undefined;
+            break;
+        case "LIKE":
+        case "ILIKE":
+        case "NOT LIKE":
+        case "NOT ILIKE":
+            w.value = w.value?.map((v) => ({
+                isColumn: v?.isColumn,
+                value: `%${v?.value}%`
+            }));
+            break;
+    }
+    return w;
+}
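
A sketch of how `runRawSqlQuery` would be called from a route handler. The request object, datasource id, and SQL are placeholders, and the TypeORM repositories in repository/db.js plus the connector manager are assumed to be initialised elsewhere in the package:

const { runRawSqlQuery } = require("./dist/code/services/userSqlPromptRunner");

async function demo(req) {
    const result = await runRawSqlQuery(req, {
        datasourceId: 1,                      // hypothetical id
        sql: "SELECT id, email FROM users WHERE created_at > :since",
        variables: { since: "'2024-01-01'" }, // substituted verbatim, see utils/rawSql.js
        size: 20,
        page: 0,
    });
    // SELECT queries get LIMIT/OFFSET appended automatically (size 20, page 0 here).
    return result.rows;
}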

package/dist/code/utils/httpError.js
ADDED
@@ -0,0 +1,13 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HttpError = void 0;
+class HttpError extends Error {
+    status;
+    message;
+    constructor(status, message) {
+        super(message);
+        this.status = status;
+        this.message = message;
+    }
+}
+exports.HttpError = HttpError;
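
`HttpError` just pairs a status code with a message so callers can map failures to HTTP responses. A small illustrative use (the helper function is hypothetical):

const { HttpError } = require("./dist/code/utils/httpError");

function assertFound(record, name) {
    if (!record) {
        // Callers can catch this and reply with err.status / err.message.
        throw new HttpError(404, `${name} not found`);
    }
    return record;
}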

package/dist/code/utils/prompts.js
ADDED
@@ -0,0 +1,11 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.buildDbString = void 0;
+const buildDbString = (dbInspection) => {
+    return dbInspection.map((inspection) => {
+        // todo: FK
+        const columns = inspection.columns?.map((column) => `${column.name}: ${column.type}`).join(", ");
+        return `${inspection.tableName}: (${columns})`;
+    }).join("\n");
+};
+exports.buildDbString = buildDbString;
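
`buildDbString` flattens stored inspections into the schema text embedded in the system prompt. A quick illustration with made-up inspection rows:

const { buildDbString } = require("./dist/code/utils/prompts");

const inspections = [
    { tableName: "users", columns: [{ name: "id", type: "integer" }, { name: "email", type: "text" }] },
    { tableName: "orders", columns: [{ name: "id", type: "integer" }, { name: "user_id", type: "integer" }] },
];

console.log(buildDbString(inspections));
// users: (id: integer, email: text)
// orders: (id: integer, user_id: integer)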

package/dist/code/utils/queryUtils.js
ADDED
@@ -0,0 +1,18 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseOrderQueryParam = parseOrderQueryParam;
+function parseOrderQueryParam(orderBy, defaultOrder = undefined) {
+    try {
+        if (orderBy) {
+            const parts = orderBy.split("&");
+            const order = {};
+            for (const part of parts) {
+                const split = part.split(":");
+                order[split[0]] = split[1];
+            }
+            return order;
+        }
+    }
+    catch (e) { }
+    return defaultOrder;
+}
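
`parseOrderQueryParam` expects `column:direction` pairs joined with `&`; for example:

const { parseOrderQueryParam } = require("./dist/code/utils/queryUtils");

console.log(parseOrderQueryParam("name:ASC&createdAt:DESC"));
// { name: 'ASC', createdAt: 'DESC' }

console.log(parseOrderQueryParam(undefined, { id: "ASC" }));
// { id: 'ASC' }  (falls back to the provided default)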

package/dist/code/utils/rawSql.js
ADDED
@@ -0,0 +1,32 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.applyRawPagination = applyRawPagination;
+exports.replaceSqlVariables = replaceSqlVariables;
+exports.detectQueryType = detectQueryType;
+const httpError_1 = require("./httpError");
+function applyRawPagination(sql, limit, offset) {
+    const sqlWithoutSemicolon = sql.replace(/;\s*$/, '');
+    return `${sqlWithoutSemicolon.trim()} LIMIT ${limit} OFFSET ${offset}`;
+}
+function replaceSqlVariables(sql, values) {
+    return sql.replace(/:([a-zA-Z_][a-zA-Z0-9_]*)/g, (match, key) => {
+        if (!(key in values)) {
+            throw new Error(`Missing value for SQL variable: ${key}`);
+        }
+        return String(values[key]); // Raw replacement (no escaping/quoting)
+    });
+}
+// todo: improve
+function detectQueryType(sql) {
+    // Remove leading whitespace and comments
+    const cleanedSql = sql
+        .replace(/--.*$/gm, '') // Remove single-line comments
+        .replace(/\/\*[\s\S]*?\*\//g, '') // Remove block comments
+        .trim();
+    // Match the first keyword after any optional opening parentheses (e.g., CTEs)
+    const match = cleanedSql.match(/^\s*(\(?\s*)*([a-zA-Z]+)/);
+    if (!match) {
+        throw new httpError_1.HttpError(400, "Failed to detect query type");
+    }
+    return match[2].toUpperCase();
+}
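
The three helpers behave as follows (example values are illustrative; note that `replaceSqlVariables` substitutes values verbatim, as the inline comment in the file points out):

const { applyRawPagination, replaceSqlVariables, detectQueryType } = require("./dist/code/utils/rawSql");

console.log(applyRawPagination("SELECT * FROM users;", 20, 40));
// SELECT * FROM users LIMIT 20 OFFSET 40

console.log(replaceSqlVariables("SELECT * FROM users WHERE id = :id", { id: 7 }));
// SELECT * FROM users WHERE id = 7

console.log(detectQueryType("  (SELECT 1) UNION (SELECT 2)"));
// SELECT
console.log(detectQueryType("UPDATE users SET active = true"));
// UPDATE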

package/dist/code/utils/request.js
ADDED
@@ -0,0 +1,35 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getRequestUserId = exports.getRequestUserToken = exports.getRequestParams = exports.getRequestQuery = exports.getRequestPayload = void 0;
+const getRequestPayload = (request, validator) => {
+    const payload = request.body;
+    if (validator) {
+        validator(payload);
+    }
+    return payload;
+};
+exports.getRequestPayload = getRequestPayload;
+const getRequestQuery = (request, validator) => {
+    const query = request.query;
+    if (validator) {
+        validator(query);
+    }
+    return query;
+};
+exports.getRequestQuery = getRequestQuery;
+const getRequestParams = (request, validator) => {
+    const params = request.params;
+    if (validator) {
+        validator(params);
+    }
+    return params;
+};
+exports.getRequestParams = getRequestParams;
+const getRequestUserToken = (request) => {
+    return request.headers["phoenix-user-token"];
+};
+exports.getRequestUserToken = getRequestUserToken;
+const getRequestUserId = (request) => {
+    return request.headers["phoenix-user-id"];
+};
+exports.getRequestUserId = getRequestUserId;
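
These helpers read the request body/query/params and the custom `phoenix-*` headers. A small sketch of a handler using them; the route, reply shape, and throwing validator are hypothetical:

const { getRequestPayload, getRequestUserId } = require("./dist/code/utils/request");
const { HttpError } = require("./dist/code/utils/httpError");

// Hypothetical route handler.
async function createQueryHandler(request, reply) {
    const userId = getRequestUserId(request); // reads the "phoenix-user-id" header
    const payload = getRequestPayload(request, (body) => {
        if (!body?.name) {
            throw new HttpError(400, "name is required");
        }
    });
    return reply.send({ userId, name: payload.name });
}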

package/dist/code/utils/token.js
ADDED
@@ -0,0 +1,8 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.sanitizeOpenAIToken = void 0;
+const PLACEHOLDER = "********************************************";
+const sanitizeOpenAIToken = (token) => {
+    return token.slice(0, 4) + PLACEHOLDER + token.slice(token.length - 4);
+};
+exports.sanitizeOpenAIToken = sanitizeOpenAIToken;
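
`sanitizeOpenAIToken` keeps only the first and last four characters of the stored key; for example (the token value is made up):

const { sanitizeOpenAIToken } = require("./dist/code/utils/token");

console.log(sanitizeOpenAIToken("sk-1234567890abcdef"));
// sk-1********************************************cdef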

package/dist/package.json
CHANGED
@@ -1 +1 @@
-{"name":"@dataramen/local-server","version":"0.0.
+{"name":"@dataramen/local-server","version":"0.0.35","license":"MIT","main":"code/proxy.js","dependencies":{"@fastify/cors":"^11.0.1","dotenv":"^16.5.0","fast-glob":"^3.3.3","fastify":"^5.3.2","mysql2":"^3.14.1","pg":"^8.15.6","sqlite3":"^5.1.7","typeorm":"^0.3.23"},"devDependencies":{"fs-extra":"^11.3.0","yargs":"^18.0.0"}}

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@dataramen/cli",
-  "version": "0.0.8",
+  "version": "0.0.10",
   "license": "MIT",
   "bin": {
     "dataramen": "bin/run.js"
@@ -14,5 +14,25 @@
   },
   "files": [
     "dist/"
+  ],
+  "keywords": [
+    "cli",
+    "sql",
+    "postgresql",
+    "mysql",
+    "database",
+    "schema",
+    "query-builder",
+    "data-browser",
+    "db-explorer",
+    "crud",
+    "data-inspector",
+    "visual-sql",
+    "no-code-sql",
+    "dataramen",
+    "nodejs",
+    "pm2",
+    "local-server",
+    "interactive-sql"
   ]
 }