@teleporthq/teleport-plugin-next-data-source 0.42.7 → 0.42.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/__tests__/fetchers.test.ts +2 -2
  2. package/dist/cjs/fetchers/mariadb.d.ts.map +1 -1
  3. package/dist/cjs/fetchers/mariadb.js +9 -3
  4. package/dist/cjs/fetchers/mariadb.js.map +1 -1
  5. package/dist/cjs/fetchers/mongodb.d.ts.map +1 -1
  6. package/dist/cjs/fetchers/mongodb.js +1 -1
  7. package/dist/cjs/fetchers/mongodb.js.map +1 -1
  8. package/dist/cjs/fetchers/mysql.d.ts.map +1 -1
  9. package/dist/cjs/fetchers/mysql.js +2 -2
  10. package/dist/cjs/fetchers/mysql.js.map +1 -1
  11. package/dist/cjs/fetchers/postgresql.d.ts.map +1 -1
  12. package/dist/cjs/fetchers/postgresql.js +4 -4
  13. package/dist/cjs/fetchers/postgresql.js.map +1 -1
  14. package/dist/cjs/fetchers/redshift.d.ts.map +1 -1
  15. package/dist/cjs/fetchers/redshift.js +3 -3
  16. package/dist/cjs/fetchers/redshift.js.map +1 -1
  17. package/dist/cjs/fetchers/turso.d.ts.map +1 -1
  18. package/dist/cjs/fetchers/turso.js +1 -1
  19. package/dist/cjs/fetchers/turso.js.map +1 -1
  20. package/dist/cjs/tsconfig.tsbuildinfo +1 -1
  21. package/dist/esm/fetchers/mariadb.d.ts.map +1 -1
  22. package/dist/esm/fetchers/mariadb.js +9 -3
  23. package/dist/esm/fetchers/mariadb.js.map +1 -1
  24. package/dist/esm/fetchers/mongodb.d.ts.map +1 -1
  25. package/dist/esm/fetchers/mongodb.js +1 -1
  26. package/dist/esm/fetchers/mongodb.js.map +1 -1
  27. package/dist/esm/fetchers/mysql.d.ts.map +1 -1
  28. package/dist/esm/fetchers/mysql.js +2 -2
  29. package/dist/esm/fetchers/mysql.js.map +1 -1
  30. package/dist/esm/fetchers/postgresql.d.ts.map +1 -1
  31. package/dist/esm/fetchers/postgresql.js +4 -4
  32. package/dist/esm/fetchers/postgresql.js.map +1 -1
  33. package/dist/esm/fetchers/redshift.d.ts.map +1 -1
  34. package/dist/esm/fetchers/redshift.js +3 -3
  35. package/dist/esm/fetchers/redshift.js.map +1 -1
  36. package/dist/esm/fetchers/turso.d.ts.map +1 -1
  37. package/dist/esm/fetchers/turso.js +1 -1
  38. package/dist/esm/fetchers/turso.js.map +1 -1
  39. package/dist/esm/tsconfig.tsbuildinfo +1 -1
  40. package/package.json +2 -2
  41. package/src/fetchers/mariadb.ts +47 -8
  42. package/src/fetchers/mongodb.ts +5 -1
  43. package/src/fetchers/mysql.ts +24 -13
  44. package/src/fetchers/postgresql.ts +29 -16
  45. package/src/fetchers/redshift.ts +17 -13
  46. package/src/fetchers/turso.ts +5 -1
package/__tests__/fetchers.test.ts
@@ -367,7 +367,7 @@ describe('generateDataSourceFetcher', () => {
  const dataSource = createPostgreSQLDataSource('ds-1')
  const code = generateDataSourceFetcher(dataSource, 'users')
 
- expect(code).toContain('import { Pool } from')
+ expect(code).toContain('import { Client } from')
  expect(code).toContain('export default async function handler')
  expect(code).toContain('SELECT * FROM users')
  expect(code).toContain('localhost')
@@ -438,7 +438,7 @@ describe('generateDataSourceFetcher', () => {
  }
  const code = generateDataSourceFetcher(dataSource, 'users')
 
- expect(code).toContain('Pool')
+ expect(code).toContain('Client')
  })
 
  it('handles TiDB as MySQL', () => {
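For context, a minimal sketch of what these assertions now check (the surrounding it(...) wrapper and the negative assertion are assumptions; createPostgreSQLDataSource, generateDataSourceFetcher, and the asserted strings come from the hunks above):

    it('generates a Client-based PostgreSQL fetcher', () => {
      const dataSource = createPostgreSQLDataSource('ds-1')
      const code = generateDataSourceFetcher(dataSource, 'users')

      // 0.42.8 emits a per-request pg Client instead of a shared Pool
      expect(code).toContain('import { Client } from')
      expect(code).not.toContain('Pool')
    })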
package/dist/cjs/fetchers/mariadb.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"mariadb.d.ts","sourceRoot":"","sources":["../../../src/fetchers/mariadb.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,sBAAsB,WACzB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAuIF,CAAA;AAED,eAAO,MAAM,2BAA2B,WAC9B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAyEF,CAAA"}
+ {"version":3,"file":"mariadb.d.ts","sourceRoot":"","sources":["../../../src/fetchers/mariadb.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,sBAAsB,WACzB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAyIF,CAAA;AAED,eAAO,MAAM,2BAA2B,WAC9B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MA8GF,CAAA"}
package/dist/cjs/fetchers/mariadb.js
@@ -5,19 +5,25 @@ var utils_1 = require("../utils");
  var generateMariaDBFetcher = function (config, tableName) {
  var mariaConfig = config;
  var database = mariaConfig.database;
- return "import mariadb from 'mariadb'\n\nexport default async function handler(req, res) {\n let pool = null\n try {\n pool = mariadb.createPool({\n host: ".concat(JSON.stringify(mariaConfig.host), ",\n port: ").concat(mariaConfig.port || 3306, ",\n user: ").concat(JSON.stringify(mariaConfig.user), ",\n password: ").concat((0, utils_1.replaceSecretReference)(mariaConfig.password), ",\n database: ").concat(JSON.stringify(mariaConfig.database), ",\n ssl: ").concat(mariaConfig.ssl || false).concat(mariaConfig.sslConfig
+ return "import mariadb from 'mariadb'\n\nexport default async function handler(req, res) {\n let connection = null\n try {\n connection = await mariadb.createConnection({\n host: ".concat(JSON.stringify(mariaConfig.host), ",\n port: ").concat(mariaConfig.port || 3306, ",\n user: ").concat(JSON.stringify(mariaConfig.user), ",\n password: ").concat((0, utils_1.replaceSecretReference)(mariaConfig.password), ",\n database: ").concat(JSON.stringify(mariaConfig.database), ",\n ssl: ").concat(mariaConfig.ssl || false).concat(mariaConfig.sslConfig
  ? ",\n sslConfig: {\n ".concat(mariaConfig.sslConfig.ca ? "ca: ".concat((0, utils_1.replaceSecretReference)(mariaConfig.sslConfig.ca), ",") : '', "\n ").concat(mariaConfig.sslConfig.cert
  ? "cert: ".concat((0, utils_1.replaceSecretReference)(mariaConfig.sslConfig.cert), ",")
  : '', "\n ").concat(mariaConfig.sslConfig.key
  ? "key: ".concat((0, utils_1.replaceSecretReference)(mariaConfig.sslConfig.key), ",")
  : '', "\n rejectUnauthorized: ").concat(mariaConfig.sslConfig.rejectUnauthorized !== false, "\n }")
- : '', "\n })\n \n const connection = await pool.getConnection()\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaRows = await connection.query(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [").concat(JSON.stringify(database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => `CAST(\\`${col}\\` AS CHAR) LIKE ?`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => '?').join(', ')\n queryParams.push(...value)\n conditions.push(`\\`${key}\\` IN (${placeholders})`)\n } else {\n conditions.push(`\\`${key}\\` = ?`)\n queryParams.push(value)\n }\n })\n }\n \n let sql = `SELECT * FROM \\`").concat(tableName, "\\``\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY \\`${sortBy}\\` ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const rows = await connection.query(sql, queryParams)\n const rowArray = Array.isArray(rows) ? rows : []\n const plainRows = rowArray.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n connection.release()\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('MariaDB fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (pool) {\n await pool.end()\n }\n }\n}\n");
+ : '', "\n })\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaRows = await connection.query(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [").concat(JSON.stringify(database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => `CAST(\\`${col}\\` AS CHAR) LIKE ?`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => '?').join(', ')\n queryParams.push(...value)\n conditions.push(`\\`${key}\\` IN (${placeholders})`)\n } else {\n conditions.push(`\\`${key}\\` = ?`)\n queryParams.push(value)\n }\n })\n }\n \n let sql = `SELECT * FROM \\`").concat(tableName, "\\``\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY \\`${sortBy}\\` ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const rows = await connection.query(sql, queryParams)\n const rowArray = Array.isArray(rows) ? rows : []\n const plainRows = rowArray.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('MariaDB fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (connection) {\n try {\n await connection.end()\n } catch (error) {\n console.error('Error closing MariaDB connection:', error)\n }\n }\n }\n}\n");
  };
  exports.generateMariaDBFetcher = generateMariaDBFetcher;
  var generateMariaDBCountFetcher = function (config, tableName) {
  var mariaConfig = config;
  var database = mariaConfig.database;
- return "\nasync function getCount(req, res) {\n const connection = getConnection()\n\n try {\n const { query, queryColumns, filters } = req.query\n const conditions = []\n const queryParams = []\n\n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaRows = await connection.query(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [".concat(JSON.stringify(database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map(col => `CAST(${col} AS CHAR) LIKE ?`).join(' OR ')\n conditions.push(`(${searchConditions})`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n }\n }\n\n if (filters) {\n const parsedFilters = JSON.parse(filters)\n for (const filter of parsedFilters) {\n conditions.push(`${filter.column} ${filter.operator} ?`)\n queryParams.push(filter.value)\n }\n }\n\n let countSql = `SELECT COUNT(*) as count FROM ").concat(tableName, "`\n if (conditions.length > 0) {\n countSql += ` WHERE ${conditions.join(' AND ')}`\n }\n\n const [rows] = await connection.execute(countSql, queryParams)\n const count = rows[0].count\n\n return res.status(200).json({\n success: true,\n count: count,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Error getting count:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to get count',\n timestamp: Date.now()\n })\n }\n}\n");
+ return "\nasync function getCount(req, res) {\n let connection = null\n\n try {\n connection = await mariadb.createConnection({\n host: ".concat(JSON.stringify(mariaConfig.host), ",\n port: ").concat(mariaConfig.port || 3306, ",\n user: ").concat(JSON.stringify(mariaConfig.user), ",\n password: ").concat((0, utils_1.replaceSecretReference)(mariaConfig.password), ",\n database: ").concat(JSON.stringify(mariaConfig.database), ",\n ssl: ").concat(mariaConfig.ssl || false).concat(mariaConfig.sslConfig
+ ? ",\n sslConfig: {\n ".concat(mariaConfig.sslConfig.ca ? "ca: ".concat((0, utils_1.replaceSecretReference)(mariaConfig.sslConfig.ca), ",") : '', "\n ").concat(mariaConfig.sslConfig.cert
+ ? "cert: ".concat((0, utils_1.replaceSecretReference)(mariaConfig.sslConfig.cert), ",")
+ : '', "\n ").concat(mariaConfig.sslConfig.key
+ ? "key: ".concat((0, utils_1.replaceSecretReference)(mariaConfig.sslConfig.key), ",")
+ : '', "\n rejectUnauthorized: ").concat(mariaConfig.sslConfig.rejectUnauthorized !== false, "\n }")
+ : '', "\n })\n \n const { query, queryColumns, filters } = req.query\n const conditions = []\n const queryParams = []\n\n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaRows = await connection.query(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [").concat(JSON.stringify(database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map(col => `CAST(${col} AS CHAR) LIKE ?`).join(' OR ')\n conditions.push(`(${searchConditions})`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n }\n }\n\n if (filters) {\n const parsedFilters = JSON.parse(filters)\n for (const filter of parsedFilters) {\n conditions.push(`${filter.column} ${filter.operator} ?`)\n queryParams.push(filter.value)\n }\n }\n\n let countSql = `SELECT COUNT(*) as count FROM ").concat(tableName, "`\n if (conditions.length > 0) {\n countSql += ` WHERE ${conditions.join(' AND ')}`\n }\n\n const rows = await connection.query(countSql, queryParams)\n const count = rows[0].count\n\n return res.status(200).json({\n success: true,\n count: count,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Error getting count:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to get count',\n timestamp: Date.now()\n })\n } finally {\n if (connection) {\n try {\n await connection.end()\n } catch (error) {\n console.error('Error closing MariaDB connection:', error)\n }\n }\n }\n}\n");
  };
  exports.generateMariaDBCountFetcher = generateMariaDBCountFetcher;
  //# sourceMappingURL=mariadb.js.map
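Decoded from the template strings above: 0.42.7 created a MariaDB pool per request and tore it down in finally, which made pooling pointless; 0.42.8 opens a single connection per request and closes it defensively. A sketch of the generated handler under placeholder connection values (host/user/database here are illustrative; the real template injects them from the data-source config and builds the SQL from req.query):

    import mariadb from 'mariadb'

    export default async function handler(req, res) {
      let connection = null
      try {
        connection = await mariadb.createConnection({
          host: 'localhost',
          port: 3306,
          user: 'app',
          password: process.env.DB_PASSWORD,
          database: 'mydb',
        })
        // The real template assembles WHERE / ORDER BY / LIMIT / OFFSET here
        const rows = await connection.query('SELECT * FROM `users`', [])
        return res.status(200).json({ success: true, data: rows, timestamp: Date.now() })
      } catch (error) {
        console.error('MariaDB fetch error:', error)
        return res.status(500).json({ success: false, error: error.message || 'Failed to fetch data', timestamp: Date.now() })
      } finally {
        if (connection) {
          try {
            await connection.end()
          } catch (error) {
            console.error('Error closing MariaDB connection:', error)
          }
        }
      }
    }

The guarded end() matters because the handler has usually already returned by the time finally runs; an exception thrown while closing would otherwise replace the completed response with an unhandled error. The count fetcher gets the same treatment and now builds its own connection inline rather than calling a getConnection() helper that the MariaDB template never emitted.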
package/dist/cjs/fetchers/mariadb.js.map
@@ -1 +1 @@
- {"version":3,"file":"mariadb.js","sourceRoot":"","sources":["../../../src/fetchers/mariadb.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAa1C,IAAM,sBAAsB,GAAG,UACpC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAuB,CAAA;IAC3C,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IAErC,OAAO,uKAMK,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,4BAChC,WAAW,CAAC,IAAI,IAAI,IAAI,4BACxB,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,gCAC5B,IAAA,8BAAsB,EAAC,WAAW,CAAC,QAAQ,CAAC,gCAC5C,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,QAAQ,CAAC,2BACzC,WAAW,CAAC,GAAG,IAAI,KAAK,SACjC,WAAW,CAAC,SAAS;QACnB,CAAC,CAAC,yCAGE,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,uBAG1F,WAAW,CAAC,SAAS,CAAC,IAAI;YACxB,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,MAAG;YAChE,CAAC,CAAC,EAAE,uBAGN,WAAW,CAAC,SAAS,CAAC,GAAG;YACvB,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,MAAG;YAC9D,CAAC,CAAC,EAAE,2CAEc,WAAW,CAAC,SAAS,CAAC,kBAAkB,KAAK,KAAK,cACxE;QACF,CAAC,CAAC,EAAE,4tBAuBK,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,kkCA8BjC,SAAS,8xCA+C5C,CAAA;AACD,CAAC,CAAA;AA1IY,QAAA,sBAAsB,0BA0IlC;AAEM,IAAM,2BAA2B,GAAG,UACzC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAuB,CAAA;IAC3C,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IAErC,OAAO,sxBAsBM,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,+1BAwBhB,SAAS,yiBAsB7D,CAAA;AACD,CAAC,CAAA;AA5EY,QAAA,2BAA2B,+BA4EvC"}
+ {"version":3,"file":"mariadb.js","sourceRoot":"","sources":["../../../src/fetchers/mariadb.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAa1C,IAAM,sBAAsB,GAAG,UACpC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAuB,CAAA;IAC3C,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IAErC,OAAO,+LAMK,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,4BAChC,WAAW,CAAC,IAAI,IAAI,IAAI,4BACxB,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,gCAC5B,IAAA,8BAAsB,EAAC,WAAW,CAAC,QAAQ,CAAC,gCAC5C,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,QAAQ,CAAC,2BACzC,WAAW,CAAC,GAAG,IAAI,KAAK,SACjC,WAAW,CAAC,SAAS;QACnB,CAAC,CAAC,yCAGE,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,uBAG1F,WAAW,CAAC,SAAS,CAAC,IAAI;YACxB,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,MAAG;YAChE,CAAC,CAAC,EAAE,uBAGN,WAAW,CAAC,SAAS,CAAC,GAAG;YACvB,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,MAAG;YAC9D,CAAC,CAAC,EAAE,2CAEc,WAAW,CAAC,SAAS,CAAC,kBAAkB,KAAK,KAAK,cACxE;QACF,CAAC,CAAC,EAAE,yqBAsBK,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,kkCA8BjC,SAAS,o4CAkD5C,CAAA;AACD,CAAC,CAAA;AA5IY,QAAA,sBAAsB,0BA4IlC;AAEM,IAAM,2BAA2B,GAAG,UACzC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAuB,CAAA;IAC3C,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IAErC,OAAO,oJAMK,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,4BAChC,WAAW,CAAC,IAAI,IAAI,IAAI,4BACxB,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,gCAC5B,IAAA,8BAAsB,EAAC,WAAW,CAAC,QAAQ,CAAC,gCAC5C,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,QAAQ,CAAC,2BACzC,WAAW,CAAC,GAAG,IAAI,KAAK,SACjC,WAAW,CAAC,SAAS;QACnB,CAAC,CAAC,yCAGE,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,uBAG1F,WAAW,CAAC,SAAS,CAAC,IAAI;YACxB,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,MAAG;YAChE,CAAC,CAAC,EAAE,uBAGN,WAAW,CAAC,SAAS,CAAC,GAAG;YACvB,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,MAAG;YAC9D,CAAC,CAAC,EAAE,2CAEc,WAAW,CAAC,SAAS,CAAC,kBAAkB,KAAK,KAAK,cACxE;QACF,CAAC,CAAC,EAAE,itBAqBK,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,+1BAwBhB,SAAS,ouBA8B7D,CAAA;AACD,CAAC,CAAA;AAjHY,QAAA,2BAA2B,+BAiHvC"}
package/dist/cjs/fetchers/mongodb.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"mongodb.d.ts","sourceRoot":"","sources":["../../../src/fetchers/mongodb.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,qBAAqB,WACxB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAoCpC,CAAA;AAWD,eAAO,MAAM,sBAAsB,WACzB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAwHF,CAAA;AAGD,eAAO,MAAM,2BAA2B,YAAa,GAAG,aAAa,MAAM,KAAG,MA6D7E,CAAA"}
+ {"version":3,"file":"mongodb.d.ts","sourceRoot":"","sources":["../../../src/fetchers/mongodb.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,qBAAqB,WACxB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAoCpC,CAAA;AAWD,eAAO,MAAM,sBAAsB,WACzB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MA4HF,CAAA;AAGD,eAAO,MAAM,2BAA2B,YAAa,GAAG,aAAa,MAAM,KAAG,MA6D7E,CAAA"}
package/dist/cjs/fetchers/mongodb.js
@@ -41,7 +41,7 @@ var generateMongoDBFetcher = function (config, tableName) {
  if (!connectionString) {
  connectionString = "mongodb://".concat(hasUsername ? "".concat(mongoConfig.username, ":").concat(mongoConfig.password, "@") : '').concat(mongoConfig.host, ":").concat(mongoConfig.port || 27017, "/").concat(database);
  }
- return "import { MongoClient, ObjectId } from 'mongodb'\n\nexport default async function handler(req, res) {\n let client = null\n try {\n const url = ".concat((0, utils_1.replaceSecretReference)(connectionString), "\n client = new MongoClient(url, {\n connectTimeoutMS: 30000,\n serverSelectionTimeoutMS: 30000\n })\n \n await client.connect()\n const db = client.db(").concat(JSON.stringify(database), ")\n const collection = db.collection('").concat(tableName, "')\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const filter = {}\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all field names from a sample document\n try {\n const sampleDoc = await db.collection(").concat(JSON.stringify(tableName), ").findOne({})\n if (sampleDoc) {\n columns = Object.keys(sampleDoc).filter(key => key !== '_id')\n }\n } catch (schemaError) {\n console.warn('Failed to fetch sample document for column names:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const orConditions = columns.map((col) => ({\n [col]: { $regex: query, $options: 'i' }\n }))\n filter.$or = orConditions\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (key === '_id') {\n if (Array.isArray(value)) {\n filter[key] = {\n $in: value.map((id) => (typeof id === 'string' ? new ObjectId(id) : id))\n }\n } else if (typeof value === 'string') {\n filter[key] = new ObjectId(value)\n } else {\n filter[key] = value\n }\n } else if (Array.isArray(value)) {\n filter[key] = { $in: value }\n } else {\n filter[key] = value\n }\n })\n }\n \n let cursor = collection.find(filter)\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n cursor = cursor.sort({ [sortBy]: sortOrderValue })\n }\n \n const limitValue = limit || perPage\n const skipValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (skipValue !== undefined) {\n cursor = cursor.skip(skipValue)\n }\n \n if (limitValue) {\n cursor = cursor.limit(parseInt(limitValue))\n }\n \n const documents = await cursor.toArray()\n const safeData = JSON.parse(JSON.stringify(documents))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('MongoDB fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n await client.close()\n }\n }\n}\n");
+ return "import { MongoClient, ObjectId } from 'mongodb'\n\nexport default async function handler(req, res) {\n let client = null\n try {\n const url = ".concat((0, utils_1.replaceSecretReference)(connectionString), "\n client = new MongoClient(url, {\n connectTimeoutMS: 30000,\n serverSelectionTimeoutMS: 30000\n })\n \n await client.connect()\n const db = client.db(").concat(JSON.stringify(database), ")\n const collection = db.collection('").concat(tableName, "')\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const filter = {}\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all field names from a sample document\n try {\n const sampleDoc = await db.collection(").concat(JSON.stringify(tableName), ").findOne({})\n if (sampleDoc) {\n columns = Object.keys(sampleDoc).filter(key => key !== '_id')\n }\n } catch (schemaError) {\n console.warn('Failed to fetch sample document for column names:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const orConditions = columns.map((col) => ({\n [col]: { $regex: query, $options: 'i' }\n }))\n filter.$or = orConditions\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (key === '_id') {\n if (Array.isArray(value)) {\n filter[key] = {\n $in: value.map((id) => (typeof id === 'string' ? new ObjectId(id) : id))\n }\n } else if (typeof value === 'string') {\n filter[key] = new ObjectId(value)\n } else {\n filter[key] = value\n }\n } else if (Array.isArray(value)) {\n filter[key] = { $in: value }\n } else {\n filter[key] = value\n }\n })\n }\n \n let cursor = collection.find(filter)\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n cursor = cursor.sort({ [sortBy]: sortOrderValue })\n }\n \n const limitValue = limit || perPage\n const skipValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (skipValue !== undefined) {\n cursor = cursor.skip(skipValue)\n }\n \n if (limitValue) {\n cursor = cursor.limit(parseInt(limitValue))\n }\n \n const documents = await cursor.toArray()\n const safeData = JSON.parse(JSON.stringify(documents))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('MongoDB fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n try {\n await client.close()\n } catch (error) {\n console.error('Error closing MongoDB client:', error)\n }\n }\n }\n}\n");
  };
  exports.generateMongoDBFetcher = generateMongoDBFetcher;
  // tslint:disable-next-line:variable-name
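The only change in this template is the teardown: client.close() in the generated finally block is now wrapped in its own try/catch, so a failed close is logged instead of clobbering the result already produced. A runnable sketch of the pattern (withClient is an illustrative helper, not part of the generated code; the connect timeouts match the template above):

    import { MongoClient } from 'mongodb'

    async function withClient(url, fn) {
      let client = null
      try {
        client = new MongoClient(url, { connectTimeoutMS: 30000, serverSelectionTimeoutMS: 30000 })
        await client.connect()
        return await fn(client)
      } finally {
        if (client) {
          try {
            // Mirrors the 0.42.8 template: guard the close, log on failure
            await client.close()
          } catch (error) {
            console.error('Error closing MongoDB client:', error)
          }
        }
      }
    }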
package/dist/cjs/fetchers/mongodb.js.map
@@ -1 +1 @@
- {"version":3,"file":"mongodb.js","sourceRoot":"","sources":["../../../src/fetchers/mongodb.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAE1C,IAAM,qBAAqB,GAAG,UACnC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,+CAA+C;IAC/C,IAAI,MAAM,CAAC,gBAAgB,EAAE;QAC3B,IAAI,OAAO,MAAM,CAAC,gBAAgB,KAAK,QAAQ,IAAI,MAAM,CAAC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;YACxF,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,8CAA8C,EAAE,CAAA;SACjF;QAED,uFAAuF;QACvF,IAAM,OAAO,GAAG,MAAM,CAAC,gBAA0B,CAAA;QACjD,IACE,CAAC,OAAO,CAAC,UAAU,CAAC,qBAAqB,CAAC;YAC1C,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,CAAC;YACjC,CAAC,OAAO,CAAC,UAAU,CAAC,gBAAgB,CAAC,EACrC;YACA,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,0CAA0C,EAAE,CAAA;SAC7E;QAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;KACzB;IAED,2EAA2E;IAC3E,yEAAyE;IACzE,uCAAuC;IACvC,IAAI,MAAM,CAAC,IAAI,KAAK,SAAS,IAAI,OAAO,MAAM,CAAC,IAAI,KAAK,QAAQ,EAAE;QAChE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,uBAAuB,EAAE,CAAA;KAC1D;IAED,IAAI,CAAC,MAAM,CAAC,QAAQ,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QAC3D,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,2BAA2B,EAAE,CAAA;KAC9D;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAtCY,QAAA,qBAAqB,yBAsCjC;AAWM,IAAM,sBAAsB,GAAG,UACpC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAuB,CAAA;IAC3C,IAAM,WAAW,GAAG,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,QAAQ,CAAA;IACzC,IAAM,QAAQ,GAAG,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,QAAQ,CAAA;IAEtC,qDAAqD;IACrD,IAAI,gBAAgB,GAAG,WAAW,CAAC,gBAAgB,CAAA;IACnD,IAAI,CAAC,gBAAgB,EAAE;QACrB,gBAAgB,GAAG,oBACjB,WAAW,CAAC,CAAC,CAAC,UAAG,WAAW,CAAC,QAAQ,cAAI,WAAW,CAAC,QAAQ,MAAG,CAAC,CAAC,CAAC,EAAE,SACpE,WAAW,CAAC,IAAI,cAAI,WAAW,CAAC,IAAI,IAAI,KAAK,cAAI,QAAQ,CAAE,CAAA;KAC/D;IAED,OAAO,8JAKS,IAAA,8BAAsB,EAAC,gBAAgB,CAAC,8LAO/B,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,sDACX,SAAS,8cAeC,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,uxEA8E1E,CAAA;AACD,CAAC,CAAA;AA3HY,QAAA,sBAAsB,0BA2HlC;AAED,yCAAyC;AAClC,IAAM,2BAA2B,GAAG,UAAC,OAAY,EAAE,SAAiB;IACzE,OAAO,kNAO+B,SAAS,i+CAoDhD,CAAA;AACD,CAAC,CAAA;AA7DY,QAAA,2BAA2B,+BA6DvC"}
+ {"version":3,"file":"mongodb.js","sourceRoot":"","sources":["../../../src/fetchers/mongodb.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAE1C,IAAM,qBAAqB,GAAG,UACnC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,+CAA+C;IAC/C,IAAI,MAAM,CAAC,gBAAgB,EAAE;QAC3B,IAAI,OAAO,MAAM,CAAC,gBAAgB,KAAK,QAAQ,IAAI,MAAM,CAAC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;YACxF,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,8CAA8C,EAAE,CAAA;SACjF;QAED,uFAAuF;QACvF,IAAM,OAAO,GAAG,MAAM,CAAC,gBAA0B,CAAA;QACjD,IACE,CAAC,OAAO,CAAC,UAAU,CAAC,qBAAqB,CAAC;YAC1C,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,CAAC;YACjC,CAAC,OAAO,CAAC,UAAU,CAAC,gBAAgB,CAAC,EACrC;YACA,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,0CAA0C,EAAE,CAAA;SAC7E;QAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;KACzB;IAED,2EAA2E;IAC3E,yEAAyE;IACzE,uCAAuC;IACvC,IAAI,MAAM,CAAC,IAAI,KAAK,SAAS,IAAI,OAAO,MAAM,CAAC,IAAI,KAAK,QAAQ,EAAE;QAChE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,uBAAuB,EAAE,CAAA;KAC1D;IAED,IAAI,CAAC,MAAM,CAAC,QAAQ,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QAC3D,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,2BAA2B,EAAE,CAAA;KAC9D;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAtCY,QAAA,qBAAqB,yBAsCjC;AAWM,IAAM,sBAAsB,GAAG,UACpC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAuB,CAAA;IAC3C,IAAM,WAAW,GAAG,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,QAAQ,CAAA;IACzC,IAAM,QAAQ,GAAG,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,QAAQ,CAAA;IAEtC,qDAAqD;IACrD,IAAI,gBAAgB,GAAG,WAAW,CAAC,gBAAgB,CAAA;IACnD,IAAI,CAAC,gBAAgB,EAAE;QACrB,gBAAgB,GAAG,oBACjB,WAAW,CAAC,CAAC,CAAC,UAAG,WAAW,CAAC,QAAQ,cAAI,WAAW,CAAC,QAAQ,MAAG,CAAC,CAAC,CAAC,EAAE,SACpE,WAAW,CAAC,IAAI,cAAI,WAAW,CAAC,IAAI,IAAI,KAAK,cAAI,QAAQ,CAAE,CAAA;KAC/D;IAED,OAAO,8JAKS,IAAA,8BAAsB,EAAC,gBAAgB,CAAC,8LAO/B,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,sDACX,SAAS,8cAeC,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,u4EAkF1E,CAAA;AACD,CAAC,CAAA;AA/HY,QAAA,sBAAsB,0BA+HlC;AAED,yCAAyC;AAClC,IAAM,2BAA2B,GAAG,UAAC,OAAY,EAAE,SAAiB;IACzE,OAAO,kNAO+B,SAAS,i+CAoDhD,CAAA;AACD,CAAC,CAAA;AA7DY,QAAA,2BAA2B,+BA6DvC"}
package/dist/cjs/fetchers/mysql.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"mysql.d.ts","sourceRoot":"","sources":["../../../src/fetchers/mysql.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,oBAAoB,WACvB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MA+IF,CAAA;AAED,eAAO,MAAM,yBAAyB,WAC5B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAwEF,CAAA"}
+ {"version":3,"file":"mysql.d.ts","sourceRoot":"","sources":["../../../src/fetchers/mysql.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,oBAAoB,WACvB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAkJF,CAAA;AAED,eAAO,MAAM,yBAAyB,WAC5B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAgFF,CAAA"}
package/dist/cjs/fetchers/mysql.js
@@ -19,12 +19,12 @@ var generateMySQLFetcher = function (config, tableName) {
  : defaultSSLEnabled
  ? "{ rejectUnauthorized: true }"
  : 'false';
- return "import mysql from 'mysql2/promise'\n\nlet pool = null\n\nconst getPool = () => {\n if (pool) return pool\n \n pool = mysql.createPool({\n host: ".concat(JSON.stringify(mysqlConfig.host), ",\n port: ").concat(mysqlConfig.port || 3306, ",\n user: ").concat(resolvedUser !== null ? JSON.stringify(resolvedUser) : 'undefined', ",\n password: ").concat((0, utils_1.replaceSecretReference)(mysqlConfig.password), ",\n database: ").concat(JSON.stringify(mysqlConfig.database), ",\n ssl: ").concat(sslConfigString, "\n })\n \n return pool\n}\n\nexport default async function handler(req, res) {\n try {\n const pool = getPool()\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const [schemaRows] = await pool.promise().query(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [").concat(JSON.stringify(database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => `CAST(${mysql.escapeId(col)} AS CHAR) LIKE ?`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => '?').join(', ')\n queryParams.push(...value)\n conditions.push(`${mysql.escapeId(key)} IN (${placeholders})`)\n } else {\n conditions.push(`${mysql.escapeId(key)} = ?`)\n queryParams.push(value)\n }\n })\n }\n \n let sql = `SELECT * FROM ${mysql.escapeId('").concat(tableName, "')}`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${mysql.escapeId(sortBy)} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const [rows] = await pool.query(sql, queryParams)\n const rowArray = Array.isArray(rows) ? rows : []\n const plainRows = rowArray.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('MySQL fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
+ return "import mysql from 'mysql2/promise'\n\nconst getConnection = () => {\n return mysql.createConnection({\n host: ".concat(JSON.stringify(mysqlConfig.host), ",\n port: ").concat(mysqlConfig.port || 3306, ",\n user: ").concat(resolvedUser !== null ? JSON.stringify(resolvedUser) : 'undefined', ",\n password: ").concat((0, utils_1.replaceSecretReference)(mysqlConfig.password), ",\n database: ").concat(JSON.stringify(mysqlConfig.database), ",\n ssl: ").concat(sslConfigString, "\n })\n}\n\nexport default async function handler(req, res) {\n const connection = await getConnection()\n \n try {\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const [schemaRows] = await connection.query(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [").concat(JSON.stringify(database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => `CAST(${mysql.escapeId(col)} AS CHAR) LIKE ?`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => '?').join(', ')\n queryParams.push(...value)\n conditions.push(`${mysql.escapeId(key)} IN (${placeholders})`)\n } else {\n conditions.push(`${mysql.escapeId(key)} = ?`)\n queryParams.push(value)\n }\n })\n }\n \n let sql = `SELECT * FROM ${mysql.escapeId('").concat(tableName, "')}`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${mysql.escapeId(sortBy)} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const [rows] = await connection.query(sql, queryParams)\n const rowArray = Array.isArray(rows) ? rows : []\n const plainRows = rowArray.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('MySQL fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (connection) {\n try {\n await connection.end()\n } catch (error) {\n console.error('Error closing MySQL connection:', error)\n }\n }\n }\n}\n");
  };
  exports.generateMySQLFetcher = generateMySQLFetcher;
  var generateMySQLCountFetcher = function (config, tableName) {
  var mysqlConfig = config;
- return "\nasync function getCount(req, res) {\n const connection = getConnection()\n\n try {\n const { query, queryColumns, filters } = req.query\n const conditions = []\n const queryParams = []\n\n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const [schemaRows] = await connection.execute(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [".concat(JSON.stringify(mysqlConfig.database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map(col => `CAST(${col} AS CHAR) LIKE ?`).join(' OR ')\n conditions.push(`(${searchConditions})`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n }\n }\n\n if (filters) {\n const parsedFilters = JSON.parse(filters)\n for (const filter of parsedFilters) {\n conditions.push(`${filter.column} ${filter.operator} ?`)\n queryParams.push(filter.value)\n }\n }\n\n let countSql = `SELECT COUNT(*) as count FROM ").concat(tableName, "`\n if (conditions.length > 0) {\n countSql += ` WHERE ${conditions.join(' AND ')}`\n }\n\n const [rows] = await connection.execute(countSql, queryParams)\n const count = rows[0].count\n\n return res.status(200).json({\n success: true,\n count: count,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Error getting count:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to get count',\n timestamp: Date.now()\n })\n }\n}\n");
+ return "\nasync function getCount(req, res) {\n const connection = await getConnection()\n\n try {\n const { query, queryColumns, filters } = req.query\n const conditions = []\n const queryParams = []\n\n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const [schemaRows] = await connection.execute(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [".concat(JSON.stringify(mysqlConfig.database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map(col => `CAST(${col} AS CHAR) LIKE ?`).join(' OR ')\n conditions.push(`(${searchConditions})`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n }\n }\n\n if (filters) {\n const parsedFilters = JSON.parse(filters)\n for (const filter of parsedFilters) {\n conditions.push(`${filter.column} ${filter.operator} ?`)\n queryParams.push(filter.value)\n }\n }\n\n let countSql = `SELECT COUNT(*) as count FROM ").concat(tableName, "`\n if (conditions.length > 0) {\n countSql += ` WHERE ${conditions.join(' AND ')}`\n }\n\n const [rows] = await connection.query(countSql, queryParams)\n const count = rows[0].count\n\n return res.status(200).json({\n success: true,\n count: count,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Error getting count:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to get count',\n timestamp: Date.now()\n })\n } finally {\n if (connection) {\n try {\n await connection.end()\n } catch (error) {\n console.error('Error closing MySQL connection:', error)\n }\n }\n }\n}\n");
  };
  exports.generateMySQLCountFetcher = generateMySQLCountFetcher;
  //# sourceMappingURL=mysql.js.map
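Decoded, the MySQL template drops the module-level pool singleton: getPool() becomes getConnection(), each request opens and closes its own mysql2/promise connection, and both fetchers gain the guarded finally. A sketch with placeholder connection values (illustrative only; the real template injects them from the data-source config and builds the SQL from req.query):

    import mysql from 'mysql2/promise'

    const getConnection = () => {
      return mysql.createConnection({
        host: 'localhost',
        port: 3306,
        user: 'app',
        password: process.env.DB_PASSWORD,
        database: 'mydb',
      })
    }

    export default async function handler(req, res) {
      const connection = await getConnection()
      try {
        // The real template assembles the statement from req.query
        const [rows] = await connection.query('SELECT * FROM `users`', [])
        return res.status(200).json({ success: true, data: rows, timestamp: Date.now() })
      } catch (error) {
        console.error('MySQL fetch error:', error)
        return res.status(500).json({ success: false, error: error.message || 'Failed to fetch data', timestamp: Date.now() })
      } finally {
        if (connection) {
          try {
            await connection.end()
          } catch (error) {
            console.error('Error closing MySQL connection:', error)
          }
        }
      }
    }

Note that the count fetcher's final statement also moves from connection.execute() to connection.query(), matching the main fetcher.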
package/dist/cjs/fetchers/mysql.js.map
@@ -1 +1 @@
- {"version":3,"file":"mysql.js","sourceRoot":"","sources":["../../../src/fetchers/mysql.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAa1C,IAAM,oBAAoB,GAAG,UAClC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAqB,CAAA;IACzC,IAAM,YAAY,GAAG,WAAW,CAAC,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,IAAI,CAAA;IACrE,IAAM,kBAAkB,GAAG,CAAC,CAAC,WAAW,CAAC,SAAS,CAAA;IAClD,IAAM,iBAAiB,GAAG,WAAW,CAAC,GAAG,KAAK,KAAK,CAAA;IACnD,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IAErC,IAAM,eAAe,GAAG,kBAAkB;QACxC,CAAC,CAAC,mBACE,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBAE1F,WAAW,CAAC,SAAS,CAAC,IAAI;YACxB,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,MAAG;YAChE,CAAC,CAAC,EAAE,qBAGN,WAAW,CAAC,SAAS,CAAC,GAAG;YACvB,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,MAAG;YAC9D,CAAC,CAAC,EAAE,yCAGN,WAAW,CAAC,SAAS,CAAC,kBAAkB,KAAK,SAAS;YACpD,CAAC,CAAC,WAAW,CAAC,SAAS,CAAC,kBAAkB;YAC1C,CAAC,CAAC,IAAI,YAEV;QACF,CAAC,CAAC,iBAAiB;YACnB,CAAC,CAAC,8BAA8B;YAChC,CAAC,CAAC,OAAO,CAAA;IAEX,OAAO,gKAQG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,0BAChC,WAAW,CAAC,IAAI,IAAI,IAAI,0BACxB,YAAY,KAAK,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,WAAW,8BAC9D,IAAA,8BAAsB,EAAC,WAAW,CAAC,QAAQ,CAAC,8BAC5C,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,QAAQ,CAAC,yBACzC,eAAe,uxBA2BX,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,+mCA8BlB,SAAS,qsCA0C3D,CAAA;AACD,CAAC,CAAA;AAlJY,QAAA,oBAAoB,wBAkJhC;AAEM,IAAM,yBAAyB,GAAG,UACvC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAqB,CAAA;IAEzC,OAAO,0xBAsBM,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,+1BAwB5B,SAAS,yiBAsB7D,CAAA;AACD,CAAC,CAAA;AA3EY,QAAA,yBAAyB,6BA2ErC"}
+ {"version":3,"file":"mysql.js","sourceRoot":"","sources":["../../../src/fetchers/mysql.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAa1C,IAAM,oBAAoB,GAAG,UAClC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAqB,CAAA;IACzC,IAAM,YAAY,GAAG,WAAW,CAAC,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,IAAI,CAAA;IACrE,IAAM,kBAAkB,GAAG,CAAC,CAAC,WAAW,CAAC,SAAS,CAAA;IAClD,IAAM,iBAAiB,GAAG,WAAW,CAAC,GAAG,KAAK,KAAK,CAAA;IACnD,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IAErC,IAAM,eAAe,GAAG,kBAAkB;QACxC,CAAC,CAAC,mBACE,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBAE1F,WAAW,CAAC,SAAS,CAAC,IAAI;YACxB,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,MAAG;YAChE,CAAC,CAAC,EAAE,qBAGN,WAAW,CAAC,SAAS,CAAC,GAAG;YACvB,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,MAAG;YAC9D,CAAC,CAAC,EAAE,yCAGN,WAAW,CAAC,SAAS,CAAC,kBAAkB,KAAK,SAAS;YACpD,CAAC,CAAC,WAAW,CAAC,SAAS,CAAC,kBAAkB;YAC1C,CAAC,CAAC,IAAI,YAEV;QACF,CAAC,CAAC,iBAAiB;YACnB,CAAC,CAAC,8BAA8B;YAChC,CAAC,CAAC,OAAO,CAAA;IAEX,OAAO,4HAIG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,0BAChC,WAAW,CAAC,IAAI,IAAI,IAAI,0BACxB,YAAY,KAAK,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,WAAW,8BAC9D,IAAA,8BAAsB,EAAC,WAAW,CAAC,QAAQ,CAAC,8BAC5C,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,QAAQ,CAAC,yBACzC,eAAe,oxBA0BX,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,+mCA8BlB,SAAS,w4CAkD3D,CAAA;AACD,CAAC,CAAA;AArJY,QAAA,oBAAoB,wBAqJhC;AAEM,IAAM,yBAAyB,GAAG,UACvC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAqB,CAAA;IAEzC,OAAO,gyBAsBM,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,+1BAwB5B,SAAS,ouBA8B7D,CAAA;AACD,CAAC,CAAA;AAnFY,QAAA,yBAAyB,6BAmFrC"}
package/dist/cjs/fetchers/postgresql.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"postgresql.d.ts","sourceRoot":"","sources":["../../../src/fetchers/postgresql.ts"],"names":[],"mappings":"AAcA,eAAO,MAAM,yBAAyB,WAC5B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MA8IF,CAAA;AAED,eAAO,MAAM,8BAA8B,WACjC,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAkFF,CAAA"}
+ {"version":3,"file":"postgresql.d.ts","sourceRoot":"","sources":["../../../src/fetchers/postgresql.ts"],"names":[],"mappings":"AAcA,eAAO,MAAM,yBAAyB,WAC5B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAkJF,CAAA;AAED,eAAO,MAAM,8BAA8B,WACjC,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MA2FF,CAAA"}
package/dist/cjs/fetchers/postgresql.js
@@ -6,20 +6,20 @@ var generatePostgreSQLFetcher = function (config, tableName) {
  var _a;
  var pgConfig = config;
  var schema = (_a = pgConfig.options) === null || _a === void 0 ? void 0 : _a.schema;
- return "import { Pool } from 'pg'\n\nlet pool = null\n\nconst getPool = () => {\n if (pool) return pool\n \n pool = new Pool({\n host: ".concat(JSON.stringify(pgConfig.host), ",\n port: ").concat(pgConfig.port || 5432, ",\n user: ").concat(JSON.stringify(pgConfig.user || pgConfig.username), ",\n password: ").concat((0, utils_1.replaceSecretReference)(pgConfig.password), ",\n database: ").concat(JSON.stringify(pgConfig.database), ",\n ssl: ").concat(pgConfig.ssl === false
+ return "import { Client } from 'pg'\n\nconst getClient = () => {\n return new Client({\n host: ".concat(JSON.stringify(pgConfig.host), ",\n port: ").concat(pgConfig.port || 5432, ",\n user: ").concat(JSON.stringify(pgConfig.user || pgConfig.username), ",\n password: ").concat((0, utils_1.replaceSecretReference)(pgConfig.password), ",\n database: ").concat(JSON.stringify(pgConfig.database), ",\n ssl: ").concat(pgConfig.ssl === false
  ? 'false'
  : pgConfig.sslConfig
  ? "{\n ".concat(pgConfig.sslConfig.ca ? "ca: ".concat((0, utils_1.replaceSecretReference)(pgConfig.sslConfig.ca), ",") : '', "\n ").concat(pgConfig.sslConfig.cert ? "cert: ".concat((0, utils_1.replaceSecretReference)(pgConfig.sslConfig.cert), ",") : '', "\n ").concat(pgConfig.sslConfig.key ? "key: ".concat((0, utils_1.replaceSecretReference)(pgConfig.sslConfig.key), ",") : '', "\n rejectUnauthorized: false\n }")
- : '{ rejectUnauthorized: false }', "\n })\n \n return pool\n}\n\nexport default async function handler(req, res) {\n try {\n const pool = getPool()\n ").concat(schema ? "await pool.query('SET search_path TO ".concat(schema, "')") : '', "\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n let paramIndex = 1\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaQuery = `\n SELECT column_name \n FROM information_schema.columns \n WHERE table_name = $1\n ").concat(schema ? "AND table_schema = $2" : '', "\n ORDER BY ordinal_position\n `\n const schemaParams = schema \n ? [").concat(JSON.stringify(tableName), ", ").concat(JSON.stringify(schema), "]\n : [").concat(JSON.stringify(tableName), "]\n \n const schemaResult = await pool.query(schemaQuery, schemaParams)\n columns = schemaResult.rows.map(row => row.column_name)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => {\n const condition = `${col}::text ILIKE $${paramIndex}`\n paramIndex++\n return condition\n })\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => `$${paramIndex++}`)\n queryParams.push(...value)\n conditions.push(`${key} IN (${placeholders.join(', ')})`)\n } else {\n conditions.push(`${key} = $${paramIndex}`)\n queryParams.push(value)\n paramIndex++\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await pool.query(sql, queryParams)\n const rows = Array.isArray(result?.rows) ? result.rows : []\n const plainRows = rows.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('PostgreSQL fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
+ : '{ rejectUnauthorized: false }', "\n })\n}\n\nexport default async function handler(req, res) {\n const client = getClient()\n \n try {\n await client.connect()\n ").concat(schema ? "await client.query('SET search_path TO ".concat(schema, "')") : '', "\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n let paramIndex = 1\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaQuery = `\n SELECT column_name \n FROM information_schema.columns \n WHERE table_name = $1\n ").concat(schema ? "AND table_schema = $2" : '', "\n ORDER BY ordinal_position\n `\n const schemaParams = schema \n ? [").concat(JSON.stringify(tableName), ", ").concat(JSON.stringify(schema), "]\n : [").concat(JSON.stringify(tableName), "]\n \n const schemaResult = await client.query(schemaQuery, schemaParams)\n columns = schemaResult.rows.map(row => row.column_name)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => {\n const condition = `${col}::text ILIKE $${paramIndex}`\n paramIndex++\n return condition\n })\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => `$${paramIndex++}`)\n queryParams.push(...value)\n conditions.push(`${key} IN (${placeholders.join(', ')})`)\n } else {\n conditions.push(`${key} = $${paramIndex}`)\n queryParams.push(value)\n paramIndex++\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await client.query(sql, queryParams)\n const rows = Array.isArray(result?.rows) ? result.rows : []\n const plainRows = rows.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('PostgreSQL fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n try {\n await client.end()\n } catch (error) {\n console.error('Error closing PostgreSQL client:', error)\n }\n }\n }\n}\n");
  };
  exports.generatePostgreSQLFetcher = generatePostgreSQLFetcher;
  var generatePostgreSQLCountFetcher = function (config, tableName) {
  var _a;
  var pgConfig = config;
  var hasSchema = !!((_a = pgConfig.options) === null || _a === void 0 ? void 0 : _a.schema);
- return "\nasync function getCount(req, res) {\n const pool = getPool()\n\n try {\n const { query, queryColumns, filters } = req.query\n const conditions = []\n const queryParams = []\n let paramIndex = 1\n\n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaQuery = `\n SELECT column_name \n FROM information_schema.columns \n WHERE table_name = $1\n ".concat(hasSchema ? "AND table_schema = $2" : '', "\n ORDER BY ordinal_position\n `\n const schemaParams = ").concat(hasSchema
+ return "\nasync function getCount(req, res) {\n const client = getClient()\n\n try {\n await client.connect()\n const { query, queryColumns, filters } = req.query\n const conditions = []\n const queryParams = []\n let paramIndex = 1\n\n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaQuery = `\n SELECT column_name \n FROM information_schema.columns \n WHERE table_name = $1\n ".concat(hasSchema ? "AND table_schema = $2" : '', "\n ORDER BY ordinal_position\n `\n const schemaParams = ").concat(hasSchema
  ? "[".concat(JSON.stringify(tableName), ", ").concat(JSON.stringify(pgConfig.options.schema), "]")
- : "[".concat(JSON.stringify(tableName), "]"), "\n \n const schemaResult = await pool.query(schemaQuery, schemaParams)\n columns = schemaResult.rows.map(row => row.column_name)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map(col => `${col}::text ILIKE $${paramIndex++}`).join(' OR ')\n conditions.push(`(${searchConditions})`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n }\n }\n\n if (filters) {\n const parsedFilters = JSON.parse(filters)\n for (const filter of parsedFilters) {\n conditions.push(`${filter.column} ${filter.operator} $${paramIndex++}`)\n queryParams.push(filter.value)\n }\n }\n\n let countSql = `SELECT COUNT(*) FROM ").concat(tableName, "`\n if (conditions.length > 0) {\n countSql += ` WHERE ${conditions.join(' AND ')}`\n }\n\n const result = await pool.query(countSql, queryParams)\n const count = parseInt(result.rows[0].count, 10)\n\n return res.status(200).json({\n success: true,\n count: count,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Error getting count:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to get count',\n timestamp: Date.now()\n })\n }\n}\n");
+ : "[".concat(JSON.stringify(tableName), "]"), "\n \n const schemaResult = await client.query(schemaQuery, schemaParams)\n columns = schemaResult.rows.map(row => row.column_name)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map(col => `${col}::text ILIKE $${paramIndex++}`).join(' OR ')\n conditions.push(`(${searchConditions})`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n }\n }\n\n if (filters) {\n const parsedFilters = JSON.parse(filters)\n for (const filter of parsedFilters) {\n conditions.push(`${filter.column} ${filter.operator} $${paramIndex++}`)\n queryParams.push(filter.value)\n }\n }\n\n let countSql = `SELECT COUNT(*) FROM ").concat(tableName, "`\n if (conditions.length > 0) {\n countSql += ` WHERE ${conditions.join(' AND ')}`\n }\n\n const result = await client.query(countSql, queryParams)\n const count = parseInt(result.rows[0].count, 10)\n\n return res.status(200).json({\n success: true,\n count: count,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Error getting count:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to get count',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n try {\n await client.end()\n } catch (error) {\n console.error('Error closing PostgreSQL client:', error)\n }\n }\n }\n}\n");
  };
  exports.generatePostgreSQLCountFetcher = generatePostgreSQLCountFetcher;
  //# sourceMappingURL=postgresql.js.map
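The hunks above are the substantive change in the compiled PostgreSQL fetchers: instead of caching a pg Pool at module scope, the generated Next.js API route now creates a fresh Client per request, calls connect() explicitly, and closes the connection in a finally block; the count fetcher gains the same connect/end lifecycle. A minimal sketch of the handler shape the template now emits, where the connection values are placeholders standing in for the inlined data-source config rather than literals from this package:

import { Client } from 'pg'

// Each request gets its own Client; nothing is cached at module scope.
const getClient = () =>
  new Client({
    host: 'localhost', // placeholder: the generator inlines the data-source host
    port: 5432,
    user: 'app',
    password: process.env.PG_PASSWORD,
    database: 'app',
    ssl: { rejectUnauthorized: false },
  })

export default async function handler(req, res) {
  const client = getClient()
  try {
    await client.connect()
    const result = await client.query('SELECT * FROM users')
    return res.status(200).json({ success: true, data: result.rows, timestamp: Date.now() })
  } catch (error) {
    return res.status(500).json({ success: false, error: error.message || 'Failed to fetch data', timestamp: Date.now() })
  } finally {
    // The per-request Client must always be closed, even when the query throws.
    try {
      await client.end()
    } catch (closeError) {
      console.error('Error closing PostgreSQL client:', closeError)
    }
  }
}

The trade-off is a connection handshake on every request in exchange for never leaking a pooled connection from a short-lived serverless invocation.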
@@ -1 +1 @@
- {"version":3,"file":"postgresql.js","sourceRoot":"","sources":["../../../src/fetchers/postgresql.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAc1C,IAAM,yBAAyB,GAAG,UACvC,MAA+B,EAC/B,SAAiB;;IAEjB,IAAM,QAAQ,GAAG,MAA0B,CAAA;IAC3C,IAAM,MAAM,GAAG,MAAA,QAAQ,CAAC,OAAO,0CAAE,MAAM,CAAA;IAEvC,OAAO,+IAQG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,0BAC7B,QAAQ,CAAC,IAAI,IAAI,IAAI,0BACrB,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,IAAI,QAAQ,CAAC,QAAQ,CAAC,8BAC9C,IAAA,8BAAsB,EAAC,QAAQ,CAAC,QAAQ,CAAC,8BACzC,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,yBAE3C,QAAQ,CAAC,GAAG,KAAK,KAAK;QACpB,CAAC,CAAC,OAAO;QACT,CAAC,CAAC,QAAQ,CAAC,SAAS;YACpB,CAAC,CAAC,mBACF,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBACpF,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBAC1F,QAAQ,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,QAAQ,CAAC,SAAS,CAAC,GAAG,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,6CAEzF;YACE,CAAC,CAAC,+BAA+B,yIAUnC,MAAM,CAAC,CAAC,CAAC,+CAAwC,MAAM,OAAI,CAAC,CAAC,CAAC,EAAE,gnBAqBxD,MAAM,CAAC,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,EAAE,0HAIlC,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,+BACpD,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,uxCAoCV,SAAS,8rCA0CxC,CAAA;AACD,CAAC,CAAA;AAjJY,QAAA,yBAAyB,6BAiJrC;AAEM,IAAM,8BAA8B,GAAG,UAC5C,MAA+B,EAC/B,SAAiB;;IAEjB,IAAM,QAAQ,GAAG,MAA0B,CAAA;IAC3C,IAAM,SAAS,GAAG,CAAC,CAAC,CAAA,MAAA,QAAQ,CAAC,OAAO,0CAAE,MAAM,CAAA,CAAA;IAE5C,OAAO,ytBAuBK,SAAS,CAAC,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,EAAE,kGAI1C,SAAS;QACP,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAQ,CAAC,MAAM,CAAC,MAAG;QAC/E,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,MAAG,87BA0BJ,SAAS,sjBAsBpD,CAAA;AACD,CAAC,CAAA;AArFY,QAAA,8BAA8B,kCAqF1C"}
+ {"version":3,"file":"postgresql.js","sourceRoot":"","sources":["../../../src/fetchers/postgresql.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAc1C,IAAM,yBAAyB,GAAG,UACvC,MAA+B,EAC/B,SAAiB;;IAEjB,IAAM,QAAQ,GAAG,MAA0B,CAAA;IAC3C,IAAM,MAAM,GAAG,MAAA,QAAQ,CAAC,OAAO,0CAAE,MAAM,CAAA;IAEvC,OAAO,qGAIG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,0BAC7B,QAAQ,CAAC,IAAI,IAAI,IAAI,0BACrB,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,IAAI,QAAQ,CAAC,QAAQ,CAAC,8BAC9C,IAAA,8BAAsB,EAAC,QAAQ,CAAC,QAAQ,CAAC,8BACzC,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,yBAE3C,QAAQ,CAAC,GAAG,KAAK,KAAK;QACpB,CAAC,CAAC,OAAO;QACT,CAAC,CAAC,QAAQ,CAAC,SAAS;YACpB,CAAC,CAAC,mBACF,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBACpF,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBAC1F,QAAQ,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,QAAQ,CAAC,SAAS,CAAC,GAAG,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,6CAEzF;YACE,CAAC,CAAC,+BAA+B,wJAUnC,MAAM,CAAC,CAAC,CAAC,iDAA0C,MAAM,OAAI,CAAC,CAAC,CAAC,EAAE,gnBAqB1D,MAAM,CAAC,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,EAAE,0HAIlC,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,+BACpD,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,yxCAoCV,SAAS,s3CAkDxC,CAAA;AACD,CAAC,CAAA;AArJY,QAAA,yBAAyB,6BAqJrC;AAEM,IAAM,8BAA8B,GAAG,UAC5C,MAA+B,EAC/B,SAAiB;;IAEjB,IAAM,QAAQ,GAAG,MAA0B,CAAA;IAC3C,IAAM,SAAS,GAAG,CAAC,CAAC,CAAA,MAAA,QAAQ,CAAC,OAAO,0CAAE,MAAM,CAAA,CAAA;IAE5C,OAAO,yvBAwBK,SAAS,CAAC,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,EAAE,kGAI1C,SAAS;QACP,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAQ,CAAC,MAAM,CAAC,MAAG;QAC/E,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,MAAG,g8BA0BJ,SAAS,8uBA8BpD,CAAA;AACD,CAAC,CAAA;AA9FY,QAAA,8BAA8B,kCA8F1C"}
@@ -1 +1 @@
- {"version":3,"file":"redshift.d.ts","sourceRoot":"","sources":["../../../src/fetchers/redshift.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,uBAAuB,WAC1B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAgJF,CAAA"}
+ {"version":3,"file":"redshift.d.ts","sourceRoot":"","sources":["../../../src/fetchers/redshift.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,uBAAuB,WAC1B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAoJF,CAAA"}
@@ -13,14 +13,14 @@ var generateRedshiftFetcher = function (config, tableName) {
  var ssl = redshiftConfig.ssl;
  var sslConfig = redshiftConfig.sslConfig;
  var schema = (_a = redshiftConfig.options) === null || _a === void 0 ? void 0 : _a.schema;
- return "import { Pool } from 'pg'\n\nlet pool = null\n\nconst getPool = () => {\n if (pool) return pool\n \n pool = new Pool({\n host: ".concat(JSON.stringify(host), ",\n port: ").concat(port || 5439, ",\n user: ").concat(JSON.stringify(user), ",\n password: ").concat((0, utils_1.replaceSecretReference)(password), ",\n database: ").concat(JSON.stringify(database), ",\n ssl: ").concat(ssl === false
+ return "import { Client } from 'pg'\n\nconst getClient = () => {\n return new Client({\n host: ".concat(JSON.stringify(host), ",\n port: ").concat(port || 5439, ",\n user: ").concat(JSON.stringify(user), ",\n password: ").concat((0, utils_1.replaceSecretReference)(password), ",\n database: ").concat(JSON.stringify(database), ",\n ssl: ").concat(ssl === false
  ? '{ rejectUnauthorized: false }'
  : sslConfig
  ? "{\n ".concat(sslConfig.ca ? "ca: ".concat((0, utils_1.replaceSecretReference)(sslConfig.ca), ",") : '', "\n ").concat(sslConfig.cert ? "cert: ".concat((0, utils_1.replaceSecretReference)(sslConfig.cert), ",") : '', "\n ").concat(sslConfig.key ? "key: ".concat((0, utils_1.replaceSecretReference)(sslConfig.key), ",") : '', "\n rejectUnauthorized: ").concat(sslConfig.rejectUnauthorized !== false, "\n }")
  : '{ rejectUnauthorized: false }' // Default to SSL with no cert verification for Redshift
- , "\n })\n \n return pool\n}\n\nexport default async function handler(req, res) {\n try {\n const pool = getPool()\n ").concat(schema ? "await pool.query('SET search_path TO ".concat(schema, "')") : '', "\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n let paramIndex = 1\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaQuery = 'SELECT column_name FROM information_schema.columns WHERE table_name = $1' + \n ").concat(schema ? "' AND table_schema = $2'" : "''", " + \n ' ORDER BY ordinal_position'\n const schemaParams = ").concat(schema
+ , "\n })\n}\n\nexport default async function handler(req, res) {\n const client = getClient()\n \n try {\n await client.connect()\n ").concat(schema ? "await client.query('SET search_path TO ".concat(schema, "')") : '', "\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n let paramIndex = 1\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaQuery = 'SELECT column_name FROM information_schema.columns WHERE table_name = $1' + \n ").concat(schema ? "' AND table_schema = $2'" : "''", " + \n ' ORDER BY ordinal_position'\n const schemaParams = ").concat(schema
  ? "[".concat(JSON.stringify(tableName), ", ").concat(JSON.stringify(schema), "]")
- : "[".concat(JSON.stringify(tableName), "]"), "\n const schemaResult = await pool.query(schemaQuery, schemaParams)\n columns = schemaResult.rows.map(row => row.column_name)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => {\n const condition = `${col}::text ILIKE $${paramIndex}`\n paramIndex++\n return condition\n })\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => `$${paramIndex++}`)\n queryParams.push(...value)\n conditions.push(`${key} IN (${placeholders.join(', ')})`)\n } else {\n conditions.push(`${key} = $${paramIndex}`)\n queryParams.push(value)\n paramIndex++\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await pool.query(sql, queryParams)\n const rows = Array.isArray(result?.rows) ? result.rows : []\n const plainRows = rows.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Redshift fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
+ : "[".concat(JSON.stringify(tableName), "]"), "\n const schemaResult = await client.query(schemaQuery, schemaParams)\n columns = schemaResult.rows.map(row => row.column_name)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => {\n const condition = `${col}::text ILIKE $${paramIndex}`\n paramIndex++\n return condition\n })\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => `$${paramIndex++}`)\n queryParams.push(...value)\n conditions.push(`${key} IN (${placeholders.join(', ')})`)\n } else {\n conditions.push(`${key} = $${paramIndex}`)\n queryParams.push(value)\n paramIndex++\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await client.query(sql, queryParams)\n const rows = Array.isArray(result?.rows) ? result.rows : []\n const plainRows = rows.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Redshift fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n try {\n await client.end()\n } catch (error) {\n console.error('Error closing Redshift client:', error)\n }\n }\n }\n}\n");
  };
  exports.generateRedshiftFetcher = generateRedshiftFetcher;
  //# sourceMappingURL=redshift.js.map
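The Redshift template makes the same Pool-to-Client switch with its own defaults: port 5439, and SSL that stays enabled in every branch, since both the ssl === false case and the no-config fallback emit { rejectUnauthorized: false }. A sketch of the ssl value the template inlines, with field names taken from the diff; sslOption and sslConfig here stand in for the data-source settings, and secret references are substituted at generation time:

// Mirrors the template's ssl branches for generated Redshift fetchers.
function buildSsl(sslOption, sslConfig) {
  if (sslOption === false) {
    return { rejectUnauthorized: false } // note: SSL stays on, only verification is skipped
  }
  if (sslConfig) {
    return {
      ...(sslConfig.ca ? { ca: sslConfig.ca } : {}),
      ...(sslConfig.cert ? { cert: sslConfig.cert } : {}),
      ...(sslConfig.key ? { key: sslConfig.key } : {}),
      rejectUnauthorized: sslConfig.rejectUnauthorized !== false,
    }
  }
  return { rejectUnauthorized: false } // Redshift default: SSL on, no cert verification
}

Worth noting: ssl === false yields the same object as the default rather than disabling SSL, so every branch of the generated Redshift code connects over SSL.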
@@ -1 +1 @@
- {"version":3,"file":"redshift.js","sourceRoot":"","sources":["../../../src/fetchers/redshift.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAa1C,IAAM,uBAAuB,GAAG,UACrC,MAA+B,EAC/B,SAAiB;;IAEjB,IAAM,cAAc,GAAG,MAAwB,CAAA;IAC/C,IAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAA;IAChC,IAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAA;IAChC,IAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAA;IAChC,IAAM,QAAQ,GAAG,cAAc,CAAC,QAAQ,CAAA;IACxC,IAAM,QAAQ,GAAG,cAAc,CAAC,QAAQ,CAAA;IACxC,IAAM,GAAG,GAAG,cAAc,CAAC,GAAG,CAAA;IAC9B,IAAM,SAAS,GAAG,cAAc,CAAC,SAAS,CAAA;IAC1C,IAAM,MAAM,GAAG,MAAA,cAAc,CAAC,OAAO,0CAAE,MAAM,CAAA;IAE7C,OAAO,+IAQG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,0BACpB,IAAI,IAAI,IAAI,0BACZ,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,8BAChB,IAAA,8BAAsB,EAAC,QAAQ,CAAC,8BAChC,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,yBAElC,GAAG,KAAK,KAAK;QACX,CAAC,CAAC,+BAA+B;QACjC,CAAC,CAAC,SAAS;YACX,CAAC,CAAC,mBACF,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBAClE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,SAAS,CAAC,IAAI,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBACxE,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,SAAS,CAAC,GAAG,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,yCACjD,SAAS,CAAC,kBAAkB,KAAK,KAAK,YAC5D;YACE,CAAC,CAAC,+BAA+B,CAAC,wDAAwD;6IAU5F,MAAM,CAAC,CAAC,CAAC,+CAAwC,MAAM,OAAI,CAAC,CAAC,CAAC,EAAE,2oBAiBxD,MAAM,CAAC,CAAC,CAAC,0BAA0B,CAAC,CAAC,CAAC,IAAI,2FAG5C,MAAM;QACJ,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAG;QAC7D,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,MAAG,4sCAmChB,SAAS,4rCA0CxC,CAAA;AACD,CAAC,CAAA;AAnJY,QAAA,uBAAuB,2BAmJnC"}
+ {"version":3,"file":"redshift.js","sourceRoot":"","sources":["../../../src/fetchers/redshift.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAa1C,IAAM,uBAAuB,GAAG,UACrC,MAA+B,EAC/B,SAAiB;;IAEjB,IAAM,cAAc,GAAG,MAAwB,CAAA;IAC/C,IAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAA;IAChC,IAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAA;IAChC,IAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAA;IAChC,IAAM,QAAQ,GAAG,cAAc,CAAC,QAAQ,CAAA;IACxC,IAAM,QAAQ,GAAG,cAAc,CAAC,QAAQ,CAAA;IACxC,IAAM,GAAG,GAAG,cAAc,CAAC,GAAG,CAAA;IAC9B,IAAM,SAAS,GAAG,cAAc,CAAC,SAAS,CAAA;IAC1C,IAAM,MAAM,GAAG,MAAA,cAAc,CAAC,OAAO,0CAAE,MAAM,CAAA;IAE7C,OAAO,qGAIG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,0BACpB,IAAI,IAAI,IAAI,0BACZ,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,8BAChB,IAAA,8BAAsB,EAAC,QAAQ,CAAC,8BAChC,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,yBAElC,GAAG,KAAK,KAAK;QACX,CAAC,CAAC,+BAA+B;QACjC,CAAC,CAAC,SAAS;YACX,CAAC,CAAC,mBACF,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBAClE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,SAAS,CAAC,IAAI,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBACxE,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,SAAS,CAAC,GAAG,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,yCACjD,SAAS,CAAC,kBAAkB,KAAK,KAAK,YAC5D;YACE,CAAC,CAAC,+BAA+B,CAAC,wDAAwD;4JAU5F,MAAM,CAAC,CAAC,CAAC,iDAA0C,MAAM,OAAI,CAAC,CAAC,CAAC,EAAE,2oBAiB1D,MAAM,CAAC,CAAC,CAAC,0BAA0B,CAAC,CAAC,CAAC,IAAI,2FAG5C,MAAM;QACJ,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAG;QAC7D,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,MAAG,8sCAmChB,SAAS,k3CAkDxC,CAAA;AACD,CAAC,CAAA;AAvJY,QAAA,uBAAuB,2BAuJnC"}
@@ -1 +1 @@
- {"version":3,"file":"turso.d.ts","sourceRoot":"","sources":["../../../src/fetchers/turso.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,mBAAmB,WACtB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAcpC,CAAA;AASD,eAAO,MAAM,oBAAoB,WACvB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAkIF,CAAA"}
+ {"version":3,"file":"turso.d.ts","sourceRoot":"","sources":["../../../src/fetchers/turso.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,mBAAmB,WACtB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAcpC,CAAA;AASD,eAAO,MAAM,oBAAoB,WACvB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAsIF,CAAA"}
@@ -19,7 +19,7 @@ var generateTursoFetcher = function (config, tableName) {
  var tursoConfig = config;
  var databaseUrl = tursoConfig.databaseUrl;
  var token = tursoConfig.token;
- return "import { createClient } from '@libsql/client'\n\nexport default async function handler(req, res) {\n let client = null\n try {\n client = createClient({\n url: ".concat(JSON.stringify(databaseUrl), ",\n authToken: ").concat((0, utils_1.replaceSecretReference)(token), "\n })\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n const whereClauses = []\n const queryParams = []\n let searchQueryColumns = null\n \n if (query) {\n if (queryColumns) {\n const columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n const searchConditions = columns.map((col) => `${col} LIKE ?`)\n whereClauses.push(`(${searchConditions.join(' OR ')})`)\n columns.forEach(() => {\n queryParams.push(`%${query}%`)\n })\n } else {\n // Store query for post-filtering if columns not specified\n searchQueryColumns = query\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => '?').join(', ')\n queryParams.push(...value)\n whereClauses.push(`${key} IN (${placeholders})`)\n } else {\n whereClauses.push(`${key} = ?`)\n queryParams.push(value)\n }\n })\n }\n \n if (whereClauses.length > 0) {\n sql += ` WHERE ${whereClauses.join(' AND ')}`\n }\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toUpperCase() === 'DESC' ? 'DESC' : 'ASC'\n sql += ` ORDER BY ${sortBy} ${sortOrderValue}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n // Only apply SQL pagination if we're not doing post-filtering\n if (!searchQueryColumns) {\n if (limitValue) {\n sql += ` LIMIT ?`\n queryParams.push(parseInt(limitValue))\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ?`\n queryParams.push(offsetValue)\n }\n }\n \n const result = await client.execute({\n sql,\n args: queryParams\n })\n \n let data = result.rows.map((row) => {\n const obj = {}\n result.columns.forEach((col, idx) => {\n obj[col] = row[col]\n })\n return obj\n })\n \n // Apply post-filtering for search without queryColumns\n if (searchQueryColumns) {\n const searchQuery = searchQueryColumns.toLowerCase()\n data = data.filter((item) => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n \n // Apply pagination after filtering\n if (limitValue) {\n const start = offsetValue || 0\n data = data.slice(start, start + parseInt(limitValue))\n } else if (offsetValue) {\n data = data.slice(offsetValue)\n }\n }\n \n const safeData = JSON.parse(JSON.stringify(data))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Turso fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n client.close()\n }\n }\n}\n");
+ return "import { createClient } from '@libsql/client'\n\nexport default async function handler(req, res) {\n let client = null\n try {\n client = createClient({\n url: ".concat(JSON.stringify(databaseUrl), ",\n authToken: ").concat((0, utils_1.replaceSecretReference)(token), "\n })\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n const whereClauses = []\n const queryParams = []\n let searchQueryColumns = null\n \n if (query) {\n if (queryColumns) {\n const columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n const searchConditions = columns.map((col) => `${col} LIKE ?`)\n whereClauses.push(`(${searchConditions.join(' OR ')})`)\n columns.forEach(() => {\n queryParams.push(`%${query}%`)\n })\n } else {\n // Store query for post-filtering if columns not specified\n searchQueryColumns = query\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => '?').join(', ')\n queryParams.push(...value)\n whereClauses.push(`${key} IN (${placeholders})`)\n } else {\n whereClauses.push(`${key} = ?`)\n queryParams.push(value)\n }\n })\n }\n \n if (whereClauses.length > 0) {\n sql += ` WHERE ${whereClauses.join(' AND ')}`\n }\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toUpperCase() === 'DESC' ? 'DESC' : 'ASC'\n sql += ` ORDER BY ${sortBy} ${sortOrderValue}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n // Only apply SQL pagination if we're not doing post-filtering\n if (!searchQueryColumns) {\n if (limitValue) {\n sql += ` LIMIT ?`\n queryParams.push(parseInt(limitValue))\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ?`\n queryParams.push(offsetValue)\n }\n }\n \n const result = await client.execute({\n sql,\n args: queryParams\n })\n \n let data = result.rows.map((row) => {\n const obj = {}\n result.columns.forEach((col, idx) => {\n obj[col] = row[col]\n })\n return obj\n })\n \n // Apply post-filtering for search without queryColumns\n if (searchQueryColumns) {\n const searchQuery = searchQueryColumns.toLowerCase()\n data = data.filter((item) => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n \n // Apply pagination after filtering\n if (limitValue) {\n const start = offsetValue || 0\n data = data.slice(start, start + parseInt(limitValue))\n } else if (offsetValue) {\n data = data.slice(offsetValue)\n }\n }\n \n const safeData = JSON.parse(JSON.stringify(data))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Turso fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n try {\n await client.close()\n } catch (error) {\n console.error('Error closing Turso client:', error)\n }\n }\n }\n}\n");
  };
  exports.generateTursoFetcher = generateTursoFetcher;
  //# sourceMappingURL=turso.js.map
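For Turso the handler body is unchanged; the diff only hardens teardown, awaiting client.close() inside a try/catch so a failed close is logged rather than thrown over the response already on its way out. A skeleton of the generated route under that reading, with the connection values read from hypothetical environment variables instead of the literals the generator inlines:

import { createClient } from '@libsql/client'

// Hypothetical placeholders; the generator inlines these from the data source.
const databaseUrl = process.env.TURSO_DATABASE_URL
const token = process.env.TURSO_AUTH_TOKEN

export default async function handler(req, res) {
  let client = null
  try {
    client = createClient({ url: databaseUrl, authToken: token })
    const result = await client.execute({ sql: 'SELECT * FROM users', args: [] })
    return res.status(200).json({ success: true, data: result.rows, timestamp: Date.now() })
  } catch (error) {
    console.error('Turso fetch error:', error)
    return res.status(500).json({ success: false, error: error.message || 'Failed to fetch data', timestamp: Date.now() })
  } finally {
    if (client) {
      try {
        await client.close() // now awaited and guarded
      } catch (error) {
        console.error('Error closing Turso client:', error)
      }
    }
  }
}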
@@ -1 +1 @@
- {"version":3,"file":"turso.js","sourceRoot":"","sources":["../../../src/fetchers/turso.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAE1C,IAAM,mBAAmB,GAAG,UACjC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,WAAW,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;QACjE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,gCAAgC,EAAE,CAAA;KACnE;IAED,IAAI,CAAC,MAAM,CAAC,KAAK,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,QAAQ,EAAE;QACrD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,wCAAwC,EAAE,CAAA;KAC3E;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAhBY,QAAA,mBAAmB,uBAgB/B;AASM,IAAM,oBAAoB,GAAG,UAClC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAqB,CAAA;IACzC,IAAM,WAAW,GAAG,WAAW,CAAC,WAAW,CAAA;IAC3C,IAAM,KAAK,GAAG,WAAW,CAAC,KAAK,CAAA;IAE/B,OAAO,oLAMI,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,iCACrB,IAAA,8BAAsB,EAAC,KAAK,CAAC,yKAKhB,SAAS,2wGAgHxC,CAAA;AACD,CAAC,CAAA;AArIY,QAAA,oBAAoB,wBAqIhC"}
+ {"version":3,"file":"turso.js","sourceRoot":"","sources":["../../../src/fetchers/turso.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAE1C,IAAM,mBAAmB,GAAG,UACjC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,WAAW,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;QACjE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,gCAAgC,EAAE,CAAA;KACnE;IAED,IAAI,CAAC,MAAM,CAAC,KAAK,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,QAAQ,EAAE;QACrD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,wCAAwC,EAAE,CAAA;KAC3E;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAhBY,QAAA,mBAAmB,uBAgB/B;AASM,IAAM,oBAAoB,GAAG,UAClC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAqB,CAAA;IACzC,IAAM,WAAW,GAAG,WAAW,CAAC,WAAW,CAAA;IAC3C,IAAM,KAAK,GAAG,WAAW,CAAC,KAAK,CAAA;IAE/B,OAAO,oLAMI,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,iCACrB,IAAA,8BAAsB,EAAC,KAAK,CAAC,yKAKhB,SAAS,+3GAoHxC,CAAA;AACD,CAAC,CAAA;AAzIY,QAAA,oBAAoB,wBAyIhC"}