@teleporthq/teleport-plugin-next-data-source 0.40.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (240)
  1. package/ARRAY_MAPPER_PAGINATION.md +1128 -0
  2. package/LICENSE +21 -0
  3. package/README.md +40 -0
  4. package/SEARCH_IMPLEMENTATION_SUMMARY.md +983 -0
  5. package/__tests__/fetchers.test.ts +545 -0
  6. package/__tests__/integration.test.ts +561 -0
  7. package/__tests__/mocks.ts +241 -0
  8. package/__tests__/pagination.test.ts +31 -0
  9. package/__tests__/plugin.test.ts +577 -0
  10. package/__tests__/utils.test.ts +430 -0
  11. package/__tests__/validation.test.ts +348 -0
  12. package/dist/cjs/array-mapper-pagination.d.ts +32 -0
  13. package/dist/cjs/array-mapper-pagination.d.ts.map +1 -0
  14. package/dist/cjs/array-mapper-pagination.js +77 -0
  15. package/dist/cjs/array-mapper-pagination.js.map +1 -0
  16. package/dist/cjs/count-fetchers.d.ts +12 -0
  17. package/dist/cjs/count-fetchers.d.ts.map +1 -0
  18. package/dist/cjs/count-fetchers.js +46 -0
  19. package/dist/cjs/count-fetchers.js.map +1 -0
  20. package/dist/cjs/data-source-fetchers.d.ts +14 -0
  21. package/dist/cjs/data-source-fetchers.d.ts.map +1 -0
  22. package/dist/cjs/data-source-fetchers.js +185 -0
  23. package/dist/cjs/data-source-fetchers.js.map +1 -0
  24. package/dist/cjs/fetchers/airtable.d.ts +6 -0
  25. package/dist/cjs/fetchers/airtable.d.ts.map +1 -0
  26. package/dist/cjs/fetchers/airtable.js +27 -0
  27. package/dist/cjs/fetchers/airtable.js.map +1 -0
  28. package/dist/cjs/fetchers/clickhouse.d.ts +6 -0
  29. package/dist/cjs/fetchers/clickhouse.d.ts.map +1 -0
  30. package/dist/cjs/fetchers/clickhouse.js +29 -0
  31. package/dist/cjs/fetchers/clickhouse.js.map +1 -0
  32. package/dist/cjs/fetchers/csv-file.d.ts +7 -0
  33. package/dist/cjs/fetchers/csv-file.d.ts.map +1 -0
  34. package/dist/cjs/fetchers/csv-file.js +36 -0
  35. package/dist/cjs/fetchers/csv-file.js.map +1 -0
  36. package/dist/cjs/fetchers/firestore.d.ts +6 -0
  37. package/dist/cjs/fetchers/firestore.d.ts.map +1 -0
  38. package/dist/cjs/fetchers/firestore.js +35 -0
  39. package/dist/cjs/fetchers/firestore.js.map +1 -0
  40. package/dist/cjs/fetchers/google-sheets.d.ts +6 -0
  41. package/dist/cjs/fetchers/google-sheets.d.ts.map +1 -0
  42. package/dist/cjs/fetchers/google-sheets.js +30 -0
  43. package/dist/cjs/fetchers/google-sheets.js.map +1 -0
  44. package/dist/cjs/fetchers/index.d.ts +17 -0
  45. package/dist/cjs/fetchers/index.d.ts.map +1 -0
  46. package/dist/cjs/fetchers/index.js +56 -0
  47. package/dist/cjs/fetchers/index.js.map +1 -0
  48. package/dist/cjs/fetchers/javascript.d.ts +7 -0
  49. package/dist/cjs/fetchers/javascript.d.ts.map +1 -0
  50. package/dist/cjs/fetchers/javascript.js +40 -0
  51. package/dist/cjs/fetchers/javascript.js.map +1 -0
  52. package/dist/cjs/fetchers/mariadb.d.ts +3 -0
  53. package/dist/cjs/fetchers/mariadb.d.ts.map +1 -0
  54. package/dist/cjs/fetchers/mariadb.js +23 -0
  55. package/dist/cjs/fetchers/mariadb.js.map +1 -0
  56. package/dist/cjs/fetchers/mongodb.d.ts +7 -0
  57. package/dist/cjs/fetchers/mongodb.d.ts.map +1 -0
  58. package/dist/cjs/fetchers/mongodb.js +52 -0
  59. package/dist/cjs/fetchers/mongodb.js.map +1 -0
  60. package/dist/cjs/fetchers/mysql.d.ts +3 -0
  61. package/dist/cjs/fetchers/mysql.d.ts.map +1 -0
  62. package/dist/cjs/fetchers/mysql.js +30 -0
  63. package/dist/cjs/fetchers/mysql.js.map +1 -0
  64. package/dist/cjs/fetchers/postgresql.d.ts +3 -0
  65. package/dist/cjs/fetchers/postgresql.d.ts.map +1 -0
  66. package/dist/cjs/fetchers/postgresql.js +25 -0
  67. package/dist/cjs/fetchers/postgresql.js.map +1 -0
  68. package/dist/cjs/fetchers/redis.d.ts +6 -0
  69. package/dist/cjs/fetchers/redis.d.ts.map +1 -0
  70. package/dist/cjs/fetchers/redis.js +46 -0
  71. package/dist/cjs/fetchers/redis.js.map +1 -0
  72. package/dist/cjs/fetchers/redshift.d.ts +2 -0
  73. package/dist/cjs/fetchers/redshift.d.ts.map +1 -0
  74. package/dist/cjs/fetchers/redshift.js +24 -0
  75. package/dist/cjs/fetchers/redshift.js.map +1 -0
  76. package/dist/cjs/fetchers/rest-api.d.ts +6 -0
  77. package/dist/cjs/fetchers/rest-api.d.ts.map +1 -0
  78. package/dist/cjs/fetchers/rest-api.js +58 -0
  79. package/dist/cjs/fetchers/rest-api.js.map +1 -0
  80. package/dist/cjs/fetchers/static-collection.d.ts +7 -0
  81. package/dist/cjs/fetchers/static-collection.d.ts.map +1 -0
  82. package/dist/cjs/fetchers/static-collection.js +24 -0
  83. package/dist/cjs/fetchers/static-collection.js.map +1 -0
  84. package/dist/cjs/fetchers/supabase.d.ts +7 -0
  85. package/dist/cjs/fetchers/supabase.d.ts.map +1 -0
  86. package/dist/cjs/fetchers/supabase.js +42 -0
  87. package/dist/cjs/fetchers/supabase.js.map +1 -0
  88. package/dist/cjs/fetchers/turso.d.ts +6 -0
  89. package/dist/cjs/fetchers/turso.d.ts.map +1 -0
  90. package/dist/cjs/fetchers/turso.js +25 -0
  91. package/dist/cjs/fetchers/turso.js.map +1 -0
  92. package/dist/cjs/index.d.ts +9 -0
  93. package/dist/cjs/index.d.ts.map +1 -0
  94. package/dist/cjs/index.js +325 -0
  95. package/dist/cjs/index.js.map +1 -0
  96. package/dist/cjs/pagination-plugin.d.ts +5 -0
  97. package/dist/cjs/pagination-plugin.d.ts.map +1 -0
  98. package/dist/cjs/pagination-plugin.js +1484 -0
  99. package/dist/cjs/pagination-plugin.js.map +1 -0
  100. package/dist/cjs/pagination-with-count.d.ts +6 -0
  101. package/dist/cjs/pagination-with-count.d.ts.map +1 -0
  102. package/dist/cjs/pagination-with-count.js +63 -0
  103. package/dist/cjs/pagination-with-count.js.map +1 -0
  104. package/dist/cjs/tsconfig.tsbuildinfo +1 -0
  105. package/dist/cjs/utils.d.ts +31 -0
  106. package/dist/cjs/utils.d.ts.map +1 -0
  107. package/dist/cjs/utils.js +763 -0
  108. package/dist/cjs/utils.js.map +1 -0
  109. package/dist/cjs/validation.d.ts +5 -0
  110. package/dist/cjs/validation.d.ts.map +1 -0
  111. package/dist/cjs/validation.js +29 -0
  112. package/dist/cjs/validation.js.map +1 -0
  113. package/dist/esm/array-mapper-pagination.d.ts +32 -0
  114. package/dist/esm/array-mapper-pagination.d.ts.map +1 -0
  115. package/dist/esm/array-mapper-pagination.js +72 -0
  116. package/dist/esm/array-mapper-pagination.js.map +1 -0
  117. package/dist/esm/count-fetchers.d.ts +12 -0
  118. package/dist/esm/count-fetchers.d.ts.map +1 -0
  119. package/dist/esm/count-fetchers.js +35 -0
  120. package/dist/esm/count-fetchers.js.map +1 -0
  121. package/dist/esm/data-source-fetchers.d.ts +14 -0
  122. package/dist/esm/data-source-fetchers.d.ts.map +1 -0
  123. package/dist/esm/data-source-fetchers.js +179 -0
  124. package/dist/esm/data-source-fetchers.js.map +1 -0
  125. package/dist/esm/fetchers/airtable.d.ts +6 -0
  126. package/dist/esm/fetchers/airtable.d.ts.map +1 -0
  127. package/dist/esm/fetchers/airtable.js +22 -0
  128. package/dist/esm/fetchers/airtable.js.map +1 -0
  129. package/dist/esm/fetchers/clickhouse.d.ts +6 -0
  130. package/dist/esm/fetchers/clickhouse.d.ts.map +1 -0
  131. package/dist/esm/fetchers/clickhouse.js +24 -0
  132. package/dist/esm/fetchers/clickhouse.js.map +1 -0
  133. package/dist/esm/fetchers/csv-file.d.ts +7 -0
  134. package/dist/esm/fetchers/csv-file.d.ts.map +1 -0
  135. package/dist/esm/fetchers/csv-file.js +30 -0
  136. package/dist/esm/fetchers/csv-file.js.map +1 -0
  137. package/dist/esm/fetchers/firestore.d.ts +6 -0
  138. package/dist/esm/fetchers/firestore.d.ts.map +1 -0
  139. package/dist/esm/fetchers/firestore.js +30 -0
  140. package/dist/esm/fetchers/firestore.js.map +1 -0
  141. package/dist/esm/fetchers/google-sheets.d.ts +6 -0
  142. package/dist/esm/fetchers/google-sheets.d.ts.map +1 -0
  143. package/dist/esm/fetchers/google-sheets.js +25 -0
  144. package/dist/esm/fetchers/google-sheets.js.map +1 -0
  145. package/dist/esm/fetchers/index.d.ts +17 -0
  146. package/dist/esm/fetchers/index.d.ts.map +1 -0
  147. package/dist/esm/fetchers/index.js +17 -0
  148. package/dist/esm/fetchers/index.js.map +1 -0
  149. package/dist/esm/fetchers/javascript.d.ts +7 -0
  150. package/dist/esm/fetchers/javascript.d.ts.map +1 -0
  151. package/dist/esm/fetchers/javascript.js +34 -0
  152. package/dist/esm/fetchers/javascript.js.map +1 -0
  153. package/dist/esm/fetchers/mariadb.d.ts +3 -0
  154. package/dist/esm/fetchers/mariadb.d.ts.map +1 -0
  155. package/dist/esm/fetchers/mariadb.js +18 -0
  156. package/dist/esm/fetchers/mariadb.js.map +1 -0
  157. package/dist/esm/fetchers/mongodb.d.ts +7 -0
  158. package/dist/esm/fetchers/mongodb.d.ts.map +1 -0
  159. package/dist/esm/fetchers/mongodb.js +46 -0
  160. package/dist/esm/fetchers/mongodb.js.map +1 -0
  161. package/dist/esm/fetchers/mysql.d.ts +3 -0
  162. package/dist/esm/fetchers/mysql.d.ts.map +1 -0
  163. package/dist/esm/fetchers/mysql.js +25 -0
  164. package/dist/esm/fetchers/mysql.js.map +1 -0
  165. package/dist/esm/fetchers/postgresql.d.ts +3 -0
  166. package/dist/esm/fetchers/postgresql.d.ts.map +1 -0
  167. package/dist/esm/fetchers/postgresql.js +20 -0
  168. package/dist/esm/fetchers/postgresql.js.map +1 -0
  169. package/dist/esm/fetchers/redis.d.ts +6 -0
  170. package/dist/esm/fetchers/redis.d.ts.map +1 -0
  171. package/dist/esm/fetchers/redis.js +41 -0
  172. package/dist/esm/fetchers/redis.js.map +1 -0
  173. package/dist/esm/fetchers/redshift.d.ts +2 -0
  174. package/dist/esm/fetchers/redshift.d.ts.map +1 -0
  175. package/dist/esm/fetchers/redshift.js +20 -0
  176. package/dist/esm/fetchers/redshift.js.map +1 -0
  177. package/dist/esm/fetchers/rest-api.d.ts +6 -0
  178. package/dist/esm/fetchers/rest-api.d.ts.map +1 -0
  179. package/dist/esm/fetchers/rest-api.js +53 -0
  180. package/dist/esm/fetchers/rest-api.js.map +1 -0
  181. package/dist/esm/fetchers/static-collection.d.ts +7 -0
  182. package/dist/esm/fetchers/static-collection.d.ts.map +1 -0
  183. package/dist/esm/fetchers/static-collection.js +18 -0
  184. package/dist/esm/fetchers/static-collection.js.map +1 -0
  185. package/dist/esm/fetchers/supabase.d.ts +7 -0
  186. package/dist/esm/fetchers/supabase.d.ts.map +1 -0
  187. package/dist/esm/fetchers/supabase.js +36 -0
  188. package/dist/esm/fetchers/supabase.js.map +1 -0
  189. package/dist/esm/fetchers/turso.d.ts +6 -0
  190. package/dist/esm/fetchers/turso.d.ts.map +1 -0
  191. package/dist/esm/fetchers/turso.js +20 -0
  192. package/dist/esm/fetchers/turso.js.map +1 -0
  193. package/dist/esm/index.d.ts +9 -0
  194. package/dist/esm/index.d.ts.map +1 -0
  195. package/dist/esm/index.js +306 -0
  196. package/dist/esm/index.js.map +1 -0
  197. package/dist/esm/pagination-plugin.d.ts +5 -0
  198. package/dist/esm/pagination-plugin.d.ts.map +1 -0
  199. package/dist/esm/pagination-plugin.js +1457 -0
  200. package/dist/esm/pagination-plugin.js.map +1 -0
  201. package/dist/esm/pagination-with-count.d.ts +6 -0
  202. package/dist/esm/pagination-with-count.d.ts.map +1 -0
  203. package/dist/esm/pagination-with-count.js +34 -0
  204. package/dist/esm/pagination-with-count.js.map +1 -0
  205. package/dist/esm/tsconfig.tsbuildinfo +1 -0
  206. package/dist/esm/utils.d.ts +31 -0
  207. package/dist/esm/utils.d.ts.map +1 -0
  208. package/dist/esm/utils.js +722 -0
  209. package/dist/esm/utils.js.map +1 -0
  210. package/dist/esm/validation.d.ts +5 -0
  211. package/dist/esm/validation.d.ts.map +1 -0
  212. package/dist/esm/validation.js +25 -0
  213. package/dist/esm/validation.js.map +1 -0
  214. package/package.json +33 -0
  215. package/src/array-mapper-pagination.ts +113 -0
  216. package/src/count-fetchers.ts +99 -0
  217. package/src/data-source-fetchers.ts +313 -0
  218. package/src/fetchers/airtable.ts +153 -0
  219. package/src/fetchers/clickhouse.ts +127 -0
  220. package/src/fetchers/csv-file.ts +163 -0
  221. package/src/fetchers/firestore.ts +138 -0
  222. package/src/fetchers/google-sheets.ts +189 -0
  223. package/src/fetchers/index.ts +32 -0
  224. package/src/fetchers/javascript.ts +150 -0
  225. package/src/fetchers/mariadb.ts +230 -0
  226. package/src/fetchers/mongodb.ts +239 -0
  227. package/src/fetchers/mysql.ts +237 -0
  228. package/src/fetchers/postgresql.ts +247 -0
  229. package/src/fetchers/redis.ts +152 -0
  230. package/src/fetchers/redshift.ts +138 -0
  231. package/src/fetchers/rest-api.ts +148 -0
  232. package/src/fetchers/static-collection.ts +149 -0
  233. package/src/fetchers/supabase.ts +246 -0
  234. package/src/fetchers/turso.ts +131 -0
  235. package/src/index.ts +352 -0
  236. package/src/pagination-plugin.ts +2335 -0
  237. package/src/pagination-with-count.ts +89 -0
  238. package/src/utils.ts +1013 -0
  239. package/src/validation.ts +32 -0
  240. package/tsconfig.json +9 -0
package/dist/esm/fetchers/csv-file.js
@@ -0,0 +1,30 @@
+ export var validateCSVConfig = function (config) {
+ if (!config || typeof config !== 'object') {
+ return { isValid: false, error: 'Config must be a valid object' };
+ }
+ if (!config.parsedData || !Array.isArray(config.parsedData)) {
+ return { isValid: false, error: 'Parsed data must be an array' };
+ }
+ // Columns are optional - if not provided, we'll infer them from parsedData
+ if (config.columns !== undefined) {
+ if (!Array.isArray(config.columns)) {
+ return { isValid: false, error: 'Columns definition must be an array' };
+ }
+ for (var _i = 0, _a = config.columns; _i < _a.length; _i++) {
+ var column = _a[_i];
+ if (!column || typeof column !== 'object' || !column.id || typeof column.id !== 'string') {
+ return { isValid: false, error: 'Each column must have a valid id' };
+ }
+ }
+ }
+ return { isValid: true };
+ };
+ export var generateCSVFileFetcher = function (config) {
+ var csvConfig = config;
+ return "const data = ".concat(JSON.stringify(csvConfig.parsedData || []), "\n\nexport default async function handler(req, res) {\n try {\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset: offsetParam } = req.query\n \n let filteredData = [...data]\n \n if (query) {\n const searchQuery = query.toLowerCase()\n \n if (queryColumns) {\n const columns = JSON.parse(queryColumns)\n filteredData = filteredData.filter((item) => {\n return columns.some((col) => {\n const value = item[col]\n return value && String(value).toLowerCase().includes(searchQuery)\n })\n })\n } else {\n filteredData = filteredData.filter((item) => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n filteredData = filteredData.filter((item) => {\n return Object.entries(parsedFilters).every(([key, value]) => {\n if (Array.isArray(value)) {\n return value.includes(item[key])\n }\n return item[key] === value\n })\n })\n }\n \n if (sortBy) {\n filteredData.sort((a, b) => {\n const aVal = a[sortBy]\n const bVal = b[sortBy]\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n if (aVal < bVal) return -sortOrderValue\n if (aVal > bVal) return sortOrderValue\n return 0\n })\n }\n \n const limitValue = limit || perPage\n const offsetValue = offsetParam !== undefined ? parseInt(offsetParam) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : 0)\n \n if (limitValue) {\n filteredData = filteredData.slice(offsetValue, offsetValue + parseInt(limitValue))\n }\n \n const safeData = JSON.parse(JSON.stringify(filteredData))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('CSV fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
+ };
+ // tslint:disable-next-line:variable-name
+ export var generateCSVCountFetcher = function (_config) {
+ return "\nasync function getCount(req, res) {\n try {\n const { query, queryColumns, filters } = req.query\n const fakeReq = { query: { query, queryColumns, filters }, method: 'GET' }\n let result = null\n let statusCode = 200\n \n const fakeRes = {\n status: (code) => {\n statusCode = code\n return fakeRes\n },\n json: (data) => {\n result = data\n return fakeRes\n },\n }\n \n await handler(fakeReq, fakeRes)\n \n if (statusCode !== 200 || !result || !result.success) {\n return res.status(500).json({\n success: false,\n error: 'Failed to get data for counting',\n timestamp: Date.now()\n })\n }\n \n const count = Array.isArray(result.data) ? result.data.length : 0\n \n return res.status(200).json({\n success: true,\n count: count,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Error getting count:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to get count',\n timestamp: Date.now()\n })\n }\n}\n";
+ };
+ //# sourceMappingURL=csv-file.js.map
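Reviewer note: a minimal usage sketch for the two exports above — validate first, then emit the route source. The config values are illustrative, and the assumption that the plugin writes the returned string out as a Next.js API route follows from the `handler(req, res)` shape it generates:

import { validateCSVConfig, generateCSVFileFetcher } from './csv-file'

// parsedData is required; columns are optional and, when present,
// each entry must carry a string id (per the validator above).
const config: Record<string, unknown> = {
  parsedData: [
    { id: '1', name: 'Ada' },
    { id: '2', name: 'Grace' },
  ],
  columns: [{ id: 'id' }, { id: 'name' }],
}

const result = validateCSVConfig(config)
if (!result.isValid) {
  throw new Error(result.error)
}

// Returns the source text of an API route with parsedData inlined
// via JSON.stringify; search, filters, sort and paging all happen
// in-memory inside that generated handler.
const routeSource = generateCSVFileFetcher(config)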
package/dist/esm/fetchers/csv-file.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"csv-file.js","sourceRoot":"","sources":["../../../src/fetchers/csv-file.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,IAAM,iBAAiB,GAAG,UAC/B,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,UAAU,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE;QAC3D,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,8BAA8B,EAAE,CAAA;KACjE;IAED,2EAA2E;IAC3E,IAAI,MAAM,CAAC,OAAO,KAAK,SAAS,EAAE;QAChC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE;YAClC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,qCAAqC,EAAE,CAAA;SACxE;QAED,KAAqB,UAAc,EAAd,KAAA,MAAM,CAAC,OAAO,EAAd,cAAc,EAAd,IAAc,EAAE;YAAhC,IAAM,MAAM,SAAA;YACf,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,CAAC,MAAM,CAAC,EAAE,IAAI,OAAO,MAAM,CAAC,EAAE,KAAK,QAAQ,EAAE;gBACxF,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,kCAAkC,EAAE,CAAA;aACrE;SACF;KACF;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAOD,MAAM,CAAC,IAAM,sBAAsB,GAAG,UAAC,MAA+B;IACpE,IAAM,SAAS,GAAG,MAAuB,CAAA;IACzC,OAAO,uBAAgB,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,UAAU,IAAI,EAAE,CAAC,mzEA6ElE,CAAA;AACD,CAAC,CAAA;AAED,yCAAyC;AACzC,MAAM,CAAC,IAAM,uBAAuB,GAAG,UAAC,OAAY;IAClD,OAAO,knCA6CR,CAAA;AACD,CAAC,CAAA"}
package/dist/esm/fetchers/firestore.d.ts
@@ -0,0 +1,6 @@
+ export declare const validateFirestoreConfig: (config: Record<string, unknown>) => {
+ isValid: boolean;
+ error?: string;
+ };
+ export declare const generateFirestoreFetcher: (config: Record<string, unknown>, tableName: string) => string;
+ //# sourceMappingURL=firestore.d.ts.map
package/dist/esm/fetchers/firestore.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"firestore.d.ts","sourceRoot":"","sources":["../../../src/fetchers/firestore.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,uBAAuB,WAC1B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAyBpC,CAAA;AAOD,eAAO,MAAM,wBAAwB,WAC3B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAkGF,CAAA"}
package/dist/esm/fetchers/firestore.js
@@ -0,0 +1,30 @@
+ import { replaceSecretReference } from '../utils';
+ export var validateFirestoreConfig = function (config) {
+ if (!config || typeof config !== 'object') {
+ return { isValid: false, error: 'Config must be a valid object' };
+ }
+ if (!config.serviceAccount || typeof config.serviceAccount !== 'string') {
+ return { isValid: false, error: 'Firestore service account JSON is required' };
+ }
+ var serviceAccount = config.serviceAccount;
+ // If serviceAccount is a secret reference, we assume the runtime env var will contain valid JSON
+ // Example: "teleporthq.secrets.DATA_SOURCE_FIRESTORE_SERVICE_ACCOUNT"
+ if (!serviceAccount.startsWith('teleporthq.secrets.')) {
+ try {
+ var parsed = JSON.parse(serviceAccount);
+ if (!parsed.project_id || !parsed.private_key || !parsed.client_email) {
+ return { isValid: false, error: 'Invalid Firestore service account JSON structure' };
+ }
+ }
+ catch (_a) {
+ return { isValid: false, error: 'Service account must be valid JSON' };
+ }
+ }
+ return { isValid: true };
+ };
+ export var generateFirestoreFetcher = function (config, tableName) {
+ var firestoreConfig = config;
+ var serviceAccount = firestoreConfig.serviceAccount;
+ return "import * as admin from 'firebase-admin'\n\nlet firestore = null\n\nconst getFirestore = () => {\n if (firestore) return firestore\n \n const rawServiceAccount = ".concat(replaceSecretReference(serviceAccount), "\n let serviceAccount\n\n try {\n serviceAccount = JSON.parse(rawServiceAccount)\n } catch (error) {\n throw new Error('Invalid Firestore service account JSON: ' + error.message)\n }\n \n if (!admin.apps.length) {\n admin.initializeApp({\n credential: admin.credential.cert(serviceAccount)\n })\n }\n \n firestore = admin.firestore()\n return firestore\n}\n\nexport default async function handler(req, res) {\n try {\n const firestore = getFirestore()\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n let queryRef = firestore.collection('").concat(tableName, "')\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n queryRef = queryRef.where(key, 'in', value)\n } else {\n queryRef = queryRef.where(key, '==', value)\n }\n })\n }\n \n if (query && queryColumns) {\n const columns = JSON.parse(queryColumns)\n for (const column of columns) {\n queryRef = queryRef\n .where(column, '>=', query)\n .where(column, '<=', query + '\\uf8ff')\n }\n }\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? 'desc' : 'asc'\n queryRef = queryRef.orderBy(sortBy, sortOrderValue)\n }\n \n const limitValue = limit || perPage\n if (limitValue) {\n queryRef = queryRef.limit(parseInt(limitValue))\n }\n \n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage && parseInt(page) > 1 ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n if (offsetValue !== undefined) {\n queryRef = queryRef.offset(offsetValue)\n }\n \n const snapshot = await queryRef.get()\n const documents = []\n snapshot.forEach((doc) => {\n documents.push({\n id: doc.id,\n ...doc.data()\n })\n })\n \n const safeData = JSON.parse(JSON.stringify(documents))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Firestore fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
+ };
+ //# sourceMappingURL=firestore.js.map
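Reviewer note: the validator above has three observable outcomes; a sketch with made-up values:

import { validateFirestoreConfig } from './firestore'

// Secret references skip JSON validation entirely; the runtime
// env var is trusted to hold the real service-account JSON.
validateFirestoreConfig({
  serviceAccount: 'teleporthq.secrets.DATA_SOURCE_FIRESTORE_SERVICE_ACCOUNT',
})
// => { isValid: true }

// Inline values must parse and carry the three required fields.
validateFirestoreConfig({
  serviceAccount: JSON.stringify({
    project_id: 'demo-project',
    private_key: 'fake-key-material',
    client_email: 'svc@demo-project.iam.gserviceaccount.com',
  }),
})
// => { isValid: true }

validateFirestoreConfig({ serviceAccount: '{ not json' })
// => { isValid: false, error: 'Service account must be valid JSON' }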
package/dist/esm/fetchers/firestore.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"firestore.js","sourceRoot":"","sources":["../../../src/fetchers/firestore.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,sBAAsB,EAAE,MAAM,UAAU,CAAA;AAEjD,MAAM,CAAC,IAAM,uBAAuB,GAAG,UACrC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,cAAc,IAAI,OAAO,MAAM,CAAC,cAAc,KAAK,QAAQ,EAAE;QACvE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,4CAA4C,EAAE,CAAA;KAC/E;IAED,IAAM,cAAc,GAAG,MAAM,CAAC,cAAwB,CAAA;IAEtD,iGAAiG;IACjG,sEAAsE;IACtE,IAAI,CAAC,cAAc,CAAC,UAAU,CAAC,qBAAqB,CAAC,EAAE;QACrD,IAAI;YACF,IAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,CAAA;YACzC,IAAI,CAAC,MAAM,CAAC,UAAU,IAAI,CAAC,MAAM,CAAC,WAAW,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;gBACrE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,kDAAkD,EAAE,CAAA;aACrF;SACF;QAAC,WAAM;YACN,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,oCAAoC,EAAE,CAAA;SACvE;KACF;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAOD,MAAM,CAAC,IAAM,wBAAwB,GAAG,UACtC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,eAAe,GAAG,MAAyB,CAAA;IACjD,IAAM,cAAc,GAAG,eAAe,CAAC,cAAc,CAAA;IAErD,OAAO,8KAOqB,sBAAsB,CAAC,cAAc,CAAC,yoBAwBzB,SAAS,wvDA8DnD,CAAA;AACD,CAAC,CAAA"}
package/dist/esm/fetchers/google-sheets.d.ts
@@ -0,0 +1,6 @@
+ export declare const validateGoogleSheetsConfig: (config: Record<string, unknown>) => {
+ isValid: boolean;
+ error?: string;
+ };
+ export declare const generateGoogleSheetsFetcher: (config: Record<string, unknown>) => string;
+ //# sourceMappingURL=google-sheets.d.ts.map
package/dist/esm/fetchers/google-sheets.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"google-sheets.d.ts","sourceRoot":"","sources":["../../../src/fetchers/google-sheets.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,0BAA0B,WAC7B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAwBpC,CAAA;AAWD,eAAO,MAAM,2BAA2B,WAAY,OAAO,MAAM,EAAE,OAAO,CAAC,KAAG,MAuJ7E,CAAA"}
package/dist/esm/fetchers/google-sheets.js
@@ -0,0 +1,25 @@
+ export var validateGoogleSheetsConfig = function (config) {
+ if (!config || typeof config !== 'object') {
+ return { isValid: false, error: 'Config must be a valid object' };
+ }
+ if (!config.sheetId && !config.sheetUrl) {
+ return { isValid: false, error: 'Google Sheets ID or URL is required' };
+ }
+ if (config.sheetId && typeof config.sheetId !== 'string') {
+ return { isValid: false, error: 'Sheet ID must be a string' };
+ }
+ if (config.sheetUrl) {
+ if (typeof config.sheetUrl !== 'string') {
+ return { isValid: false, error: 'Sheet URL must be a string' };
+ }
+ if (!config.sheetUrl.includes('docs.google.com/spreadsheets')) {
+ return { isValid: false, error: 'Invalid Google Sheets URL format' };
+ }
+ }
+ return { isValid: true };
+ };
+ export var generateGoogleSheetsFetcher = function (config) {
+ var sheetsConfig = config;
+ return "import fetch from 'node-fetch'\n\nexport default async function handler(req, res) {\n try {\n const sheetUrl = ".concat(JSON.stringify(sheetsConfig.sheetUrl), "\n let sheetId = ").concat(JSON.stringify(sheetsConfig.sheetId), "\n const range = ").concat(JSON.stringify(sheetsConfig.range || 'A1:Z1000'), "\n const maxRows = ").concat(sheetsConfig.maxRows || 0, "\n \n if (!sheetId && sheetUrl) {\n const match = sheetUrl.match(/\\/d\\/([a-zA-Z0-9-_]+)/)\n sheetId = match ? match[1] : undefined\n }\n \n if (!sheetId) {\n return res.status(400).json({\n success: false,\n error: 'Invalid Google Sheets URL or Sheet ID',\n timestamp: Date.now()\n })\n }\n \n let url = `https://docs.google.com/spreadsheets/d/${sheetId}/gviz/tq?tqx=out:json&range=${range}`\n \n if (maxRows && maxRows > 0) {\n url += `&tq=limit ${maxRows}`\n }\n \n const response = await fetch(url)\n \n if (!response.ok) {\n return res.status(response.status).json({\n success: false,\n error: `HTTP ${response.status}: ${response.statusText}`,\n timestamp: Date.now()\n })\n }\n \n const text = await response.text()\n const jsonMatch = text.match(/google\\.visualization\\.Query\\.setResponse\\((.*)\\);/)\n \n if (!jsonMatch) {\n return res.status(500).json({\n success: false,\n error: 'Unable to parse Google Sheets response',\n timestamp: Date.now()\n })\n }\n \n const data = JSON.parse(jsonMatch[1])\n \n if (data.status === 'error') {\n return res.status(500).json({\n success: false,\n error: data.errors?.[0]?.detailed_message || 'Failed to fetch Google Sheets data',\n timestamp: Date.now()\n })\n }\n \n const table = data.table\n const columns = table.cols.map((col, index) => ({\n id: col.id || `col_${index}`,\n label: col.label || `Column ${index + 1}`,\n type: col.type || 'string'\n }))\n \n const rows = table.rows.map((row) => {\n const rowData = {}\n row.c.forEach((cell, index) => {\n const columnId = columns[index].id\n rowData[columnId] = cell?.v ?? null\n })\n return rowData\n })\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset: offsetParam } = req.query\n \n let filteredData = [...rows]\n \n if (query) {\n const searchQuery = query.toLowerCase()\n \n if (queryColumns) {\n const searchColumns = JSON.parse(queryColumns)\n filteredData = filteredData.filter((item) => {\n return searchColumns.some((col) => {\n const value = item[col]\n return value && String(value).toLowerCase().includes(searchQuery)\n })\n })\n } else {\n filteredData = filteredData.filter((item) => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n filteredData = filteredData.filter((item) => {\n return Object.entries(parsedFilters).every(([key, value]) => {\n if (Array.isArray(value)) {\n return value.includes(item[key])\n }\n return item[key] === value\n })\n })\n }\n \n if (sortBy) {\n filteredData.sort((a, b) => {\n const aVal = a[sortBy]\n const bVal = b[sortBy]\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n if (aVal < bVal) return -sortOrderValue\n if (aVal > bVal) return sortOrderValue\n return 0\n })\n }\n \n const limitValue = limit || perPage\n const offsetValue = offsetParam !== undefined ? parseInt(offsetParam) : (page && perPage ? 
(parseInt(page) - 1) * parseInt(perPage) : 0)\n \n if (limitValue) {\n filteredData = filteredData.slice(offsetValue, offsetValue + parseInt(limitValue))\n }\n \n const safeData = JSON.parse(JSON.stringify(filteredData))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Google Sheets fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
24
+ };
+ //# sourceMappingURL=google-sheets.js.map
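Reviewer note: inside the generated handler, the sheet ID is recovered from a shared URL via regex. A standalone reconstruction of just that step (the helper name is hypothetical):

// Mirrors the ID-extraction logic inside the generated handler.
const extractSheetId = (sheetUrl: string): string | undefined => {
  const match = sheetUrl.match(/\/d\/([a-zA-Z0-9-_]+)/)
  return match ? match[1] : undefined
}

extractSheetId('https://docs.google.com/spreadsheets/d/1AbC-dEf_123/edit#gid=0')
// => '1AbC-dEf_123'

extractSheetId('https://docs.google.com/spreadsheets/u/0/')
// => undefined, which the handler reports as a 400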
package/dist/esm/fetchers/google-sheets.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"google-sheets.js","sourceRoot":"","sources":["../../../src/fetchers/google-sheets.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,IAAM,0BAA0B,GAAG,UACxC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACvC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,qCAAqC,EAAE,CAAA;KACxE;IAED,IAAI,MAAM,CAAC,OAAO,IAAI,OAAO,MAAM,CAAC,OAAO,KAAK,QAAQ,EAAE;QACxD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,2BAA2B,EAAE,CAAA;KAC9D;IAED,IAAI,MAAM,CAAC,QAAQ,EAAE;QACnB,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACvC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,4BAA4B,EAAE,CAAA;SAC/D;QAED,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,QAAQ,CAAC,8BAA8B,CAAC,EAAE;YAC7D,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,kCAAkC,EAAE,CAAA;SACrE;KACF;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAWD,MAAM,CAAC,IAAM,2BAA2B,GAAG,UAAC,MAA+B;IACzE,IAAM,YAAY,GAAG,MAA4B,CAAA;IACjD,OAAO,6HAIc,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,QAAQ,CAAC,iCACxC,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,OAAO,CAAC,iCACpC,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,KAAK,IAAI,UAAU,CAAC,mCAC9C,YAAY,CAAC,OAAO,IAAI,CAAC,goIA6I9C,CAAA;AACD,CAAC,CAAA"}
package/dist/esm/fetchers/index.d.ts
@@ -0,0 +1,17 @@
+ export { generatePostgreSQLFetcher, generatePostgreSQLCountFetcher } from './postgresql';
+ export { generateMySQLFetcher, generateMySQLCountFetcher } from './mysql';
+ export { generateMariaDBFetcher, generateMariaDBCountFetcher } from './mariadb';
+ export { generateRedshiftFetcher } from './redshift';
+ export { generateMongoDBFetcher, generateMongoDBCountFetcher, validateMongoDBConfig, } from './mongodb';
+ export { generateRedisFetcher, validateRedisConfig } from './redis';
+ export { generateFirestoreFetcher, validateFirestoreConfig } from './firestore';
+ export { generateClickHouseFetcher, validateClickHouseConfig } from './clickhouse';
+ export { generateAirtableFetcher, validateAirtableConfig } from './airtable';
+ export { generateSupabaseFetcher, generateSupabaseCountFetcher, validateSupabaseConfig, } from './supabase';
+ export { generateTursoFetcher, validateTursoConfig } from './turso';
+ export { generateRESTAPIFetcher, validateRESTAPIConfig } from './rest-api';
+ export { generateJavaScriptFetcher, generateJavaScriptCountFetcher, validateJavaScriptConfig, } from './javascript';
+ export { generateCSVFileFetcher, generateCSVCountFetcher, validateCSVConfig } from './csv-file';
+ export { generateStaticCollectionFetcher, generateStaticCollectionCountFetcher, validateStaticCollectionConfig, } from './static-collection';
+ export { generateGoogleSheetsFetcher, validateGoogleSheetsConfig } from './google-sheets';
+ //# sourceMappingURL=index.d.ts.map
package/dist/esm/fetchers/index.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/fetchers/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,yBAAyB,EAAE,8BAA8B,EAAE,MAAM,cAAc,CAAA;AACxF,OAAO,EAAE,oBAAoB,EAAE,yBAAyB,EAAE,MAAM,SAAS,CAAA;AACzE,OAAO,EAAE,sBAAsB,EAAE,2BAA2B,EAAE,MAAM,WAAW,CAAA;AAC/E,OAAO,EAAE,uBAAuB,EAAE,MAAM,YAAY,CAAA;AACpD,OAAO,EACL,sBAAsB,EACtB,2BAA2B,EAC3B,qBAAqB,GACtB,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,oBAAoB,EAAE,mBAAmB,EAAE,MAAM,SAAS,CAAA;AACnE,OAAO,EAAE,wBAAwB,EAAE,uBAAuB,EAAE,MAAM,aAAa,CAAA;AAC/E,OAAO,EAAE,yBAAyB,EAAE,wBAAwB,EAAE,MAAM,cAAc,CAAA;AAClF,OAAO,EAAE,uBAAuB,EAAE,sBAAsB,EAAE,MAAM,YAAY,CAAA;AAC5E,OAAO,EACL,uBAAuB,EACvB,4BAA4B,EAC5B,sBAAsB,GACvB,MAAM,YAAY,CAAA;AACnB,OAAO,EAAE,oBAAoB,EAAE,mBAAmB,EAAE,MAAM,SAAS,CAAA;AACnE,OAAO,EAAE,sBAAsB,EAAE,qBAAqB,EAAE,MAAM,YAAY,CAAA;AAC1E,OAAO,EACL,yBAAyB,EACzB,8BAA8B,EAC9B,wBAAwB,GACzB,MAAM,cAAc,CAAA;AACrB,OAAO,EAAE,sBAAsB,EAAE,uBAAuB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AAC/F,OAAO,EACL,+BAA+B,EAC/B,oCAAoC,EACpC,8BAA8B,GAC/B,MAAM,qBAAqB,CAAA;AAC5B,OAAO,EAAE,2BAA2B,EAAE,0BAA0B,EAAE,MAAM,iBAAiB,CAAA"}
package/dist/esm/fetchers/index.js
@@ -0,0 +1,17 @@
+ export { generatePostgreSQLFetcher, generatePostgreSQLCountFetcher } from './postgresql';
+ export { generateMySQLFetcher, generateMySQLCountFetcher } from './mysql';
+ export { generateMariaDBFetcher, generateMariaDBCountFetcher } from './mariadb';
+ export { generateRedshiftFetcher } from './redshift';
+ export { generateMongoDBFetcher, generateMongoDBCountFetcher, validateMongoDBConfig, } from './mongodb';
+ export { generateRedisFetcher, validateRedisConfig } from './redis';
+ export { generateFirestoreFetcher, validateFirestoreConfig } from './firestore';
+ export { generateClickHouseFetcher, validateClickHouseConfig } from './clickhouse';
+ export { generateAirtableFetcher, validateAirtableConfig } from './airtable';
+ export { generateSupabaseFetcher, generateSupabaseCountFetcher, validateSupabaseConfig, } from './supabase';
+ export { generateTursoFetcher, validateTursoConfig } from './turso';
+ export { generateRESTAPIFetcher, validateRESTAPIConfig } from './rest-api';
+ export { generateJavaScriptFetcher, generateJavaScriptCountFetcher, validateJavaScriptConfig, } from './javascript';
+ export { generateCSVFileFetcher, generateCSVCountFetcher, validateCSVConfig } from './csv-file';
+ export { generateStaticCollectionFetcher, generateStaticCollectionCountFetcher, validateStaticCollectionConfig, } from './static-collection';
+ export { generateGoogleSheetsFetcher, validateGoogleSheetsConfig } from './google-sheets';
+ //# sourceMappingURL=index.js.map
package/dist/esm/fetchers/index.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/fetchers/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,yBAAyB,EAAE,8BAA8B,EAAE,MAAM,cAAc,CAAA;AACxF,OAAO,EAAE,oBAAoB,EAAE,yBAAyB,EAAE,MAAM,SAAS,CAAA;AACzE,OAAO,EAAE,sBAAsB,EAAE,2BAA2B,EAAE,MAAM,WAAW,CAAA;AAC/E,OAAO,EAAE,uBAAuB,EAAE,MAAM,YAAY,CAAA;AACpD,OAAO,EACL,sBAAsB,EACtB,2BAA2B,EAC3B,qBAAqB,GACtB,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,oBAAoB,EAAE,mBAAmB,EAAE,MAAM,SAAS,CAAA;AACnE,OAAO,EAAE,wBAAwB,EAAE,uBAAuB,EAAE,MAAM,aAAa,CAAA;AAC/E,OAAO,EAAE,yBAAyB,EAAE,wBAAwB,EAAE,MAAM,cAAc,CAAA;AAClF,OAAO,EAAE,uBAAuB,EAAE,sBAAsB,EAAE,MAAM,YAAY,CAAA;AAC5E,OAAO,EACL,uBAAuB,EACvB,4BAA4B,EAC5B,sBAAsB,GACvB,MAAM,YAAY,CAAA;AACnB,OAAO,EAAE,oBAAoB,EAAE,mBAAmB,EAAE,MAAM,SAAS,CAAA;AACnE,OAAO,EAAE,sBAAsB,EAAE,qBAAqB,EAAE,MAAM,YAAY,CAAA;AAC1E,OAAO,EACL,yBAAyB,EACzB,8BAA8B,EAC9B,wBAAwB,GACzB,MAAM,cAAc,CAAA;AACrB,OAAO,EAAE,sBAAsB,EAAE,uBAAuB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AAC/F,OAAO,EACL,+BAA+B,EAC/B,oCAAoC,EACpC,8BAA8B,GAC/B,MAAM,qBAAqB,CAAA;AAC5B,OAAO,EAAE,2BAA2B,EAAE,0BAA0B,EAAE,MAAM,iBAAiB,CAAA"}
package/dist/esm/fetchers/javascript.d.ts
@@ -0,0 +1,7 @@
+ export declare const validateJavaScriptConfig: (config: Record<string, unknown>) => {
+ isValid: boolean;
+ error?: string;
+ };
+ export declare const generateJavaScriptFetcher: (config: Record<string, unknown>) => string;
+ export declare const generateJavaScriptCountFetcher: (_config: any) => string;
+ //# sourceMappingURL=javascript.d.ts.map
package/dist/esm/fetchers/javascript.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"javascript.d.ts","sourceRoot":"","sources":["../../../src/fetchers/javascript.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,wBAAwB,WAC3B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CA2BpC,CAAA;AAMD,eAAO,MAAM,yBAAyB,WAAY,OAAO,MAAM,EAAE,OAAO,CAAC,KAAG,MAgE3E,CAAA;AAGD,eAAO,MAAM,8BAA8B,YAAa,GAAG,KAAG,MA+C7D,CAAA"}
package/dist/esm/fetchers/javascript.js
@@ -0,0 +1,34 @@
+ export var validateJavaScriptConfig = function (config) {
+ if (!config || typeof config !== 'object') {
+ return { isValid: false, error: 'Config must be a valid object' };
+ }
+ if (!config.code || typeof config.code !== 'string' || config.code.trim() === '') {
+ return { isValid: false, error: 'JavaScript code is required' };
+ }
+ var dangerousPatterns = [
+ /require\s*\(/i,
+ /import\s+/i,
+ /eval\s*\(/i,
+ /Function\s*\(/i,
+ /process\./i,
+ /global\./i,
+ /\.exec\s*\(/i,
+ ];
+ for (var _i = 0, dangerousPatterns_1 = dangerousPatterns; _i < dangerousPatterns_1.length; _i++) {
+ var pattern = dangerousPatterns_1[_i];
+ if (pattern.test(config.code)) {
+ console.warn('[Data Source] Warning: JavaScript code contains potentially dangerous patterns');
+ break;
+ }
+ }
+ return { isValid: true };
+ };
+ export var generateJavaScriptFetcher = function (config) {
+ var jsConfig = config;
+ return "export default async function handler(req, res) {\n try {\n const { limit, offset, page, perPage, query, queryColumns } = req.query\n \n const code = ".concat(JSON.stringify(jsConfig.code), "\n const executeCode = new Function('return ' + code)\n let data = executeCode()\n \n if (Array.isArray(data)) {\n if (query) {\n const searchQuery = query.toLowerCase()\n \n if (queryColumns) {\n // Search specific columns\n const columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n data = data.filter(item => {\n return columns.some(col => {\n const value = item[col]\n if (value === null || value === undefined) return false\n return String(value).toLowerCase().includes(searchQuery)\n })\n })\n } else {\n // Search across all fields by stringifying the entire record\n data = data.filter(item => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n }\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : 0)\n \n if (limitValue) {\n data = data.slice(offsetValue, offsetValue + parseInt(limitValue))\n } else if (offsetValue > 0) {\n data = data.slice(offsetValue)\n }\n }\n \n const safeData = JSON.parse(JSON.stringify(data))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('JavaScript execution error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to execute code',\n timestamp: Date.now()\n })\n }\n}\n");
+ };
+ // tslint:disable-next-line:variable-name
+ export var generateJavaScriptCountFetcher = function (_config) {
+ return "\nasync function getCount(req, res) {\n try {\n const { query, queryColumns } = req.query\n const fakeReq = { query: { query, queryColumns }, method: 'GET' }\n let result = null\n let statusCode = 200\n \n const fakeRes = {\n status: (code) => {\n statusCode = code\n return fakeRes\n },\n json: (data) => {\n result = data\n return fakeRes\n },\n }\n \n await handler(fakeReq, fakeRes)\n \n if (statusCode !== 200 || !result || !result.success) {\n return res.status(500).json({\n success: false,\n error: 'Failed to get data for counting',\n timestamp: Date.now()\n })\n }\n \n const count = Array.isArray(result.data) ? result.data.length : 0\n \n return res.status(200).json({\n success: true,\n count: count,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Error getting count:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to get count',\n timestamp: Date.now()\n })\n }\n}\n";
+ };
+ //# sourceMappingURL=javascript.js.map
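Reviewer note: the dangerous-pattern scan above only logs a warning and never fails validation; only missing or empty code is rejected, and the code string still reaches new Function() in the generated handler. A sketch:

import { validateJavaScriptConfig } from './javascript'

validateJavaScriptConfig({ code: '' })
// => { isValid: false, error: 'JavaScript code is required' }

// Matches /process\./i, so a warning is logged -- but the
// config is still accepted.
validateJavaScriptConfig({ code: 'process.env.SECRET' })
// => { isValid: true }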
package/dist/esm/fetchers/javascript.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"javascript.js","sourceRoot":"","sources":["../../../src/fetchers/javascript.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,IAAM,wBAAwB,GAAG,UACtC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,IAAI,IAAI,OAAO,MAAM,CAAC,IAAI,KAAK,QAAQ,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;QAChF,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,6BAA6B,EAAE,CAAA;KAChE;IAED,IAAM,iBAAiB,GAAG;QACxB,eAAe;QACf,YAAY;QACZ,YAAY;QACZ,gBAAgB;QAChB,YAAY;QACZ,WAAW;QACX,cAAc;KACf,CAAA;IAED,KAAsB,UAAiB,EAAjB,uCAAiB,EAAjB,+BAAiB,EAAjB,IAAiB,EAAE;QAApC,IAAM,OAAO,0BAAA;QAChB,IAAI,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;YAC7B,OAAO,CAAC,IAAI,CAAC,gFAAgF,CAAC,CAAA;YAC9F,MAAK;SACN;KACF;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAMD,MAAM,CAAC,IAAM,yBAAyB,GAAG,UAAC,MAA+B;IACvE,IAAM,QAAQ,GAAG,MAA0B,CAAA;IAC3C,OAAO,0KAIU,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,m3DAyD/C,CAAA;AACD,CAAC,CAAA;AAED,yCAAyC;AACzC,MAAM,CAAC,IAAM,8BAA8B,GAAG,UAAC,OAAY;IACzD,OAAO,gmCA6CR,CAAA;AACD,CAAC,CAAA"}
package/dist/esm/fetchers/mariadb.d.ts
@@ -0,0 +1,3 @@
+ export declare const generateMariaDBFetcher: (config: Record<string, unknown>, tableName: string) => string;
+ export declare const generateMariaDBCountFetcher: (config: Record<string, unknown>, tableName: string) => string;
+ //# sourceMappingURL=mariadb.d.ts.map
package/dist/esm/fetchers/mariadb.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"mariadb.d.ts","sourceRoot":"","sources":["../../../src/fetchers/mariadb.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,sBAAsB,WACzB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAuIF,CAAA;AAED,eAAO,MAAM,2BAA2B,WAC9B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAyEF,CAAA"}
package/dist/esm/fetchers/mariadb.js
@@ -0,0 +1,18 @@
+ import { replaceSecretReference } from '../utils';
+ export var generateMariaDBFetcher = function (config, tableName) {
+ var mariaConfig = config;
+ var database = mariaConfig.database;
+ return "import mariadb from 'mariadb'\n\nexport default async function handler(req, res) {\n let pool = null\n try {\n pool = mariadb.createPool({\n host: ".concat(JSON.stringify(mariaConfig.host), ",\n port: ").concat(mariaConfig.port || 3306, ",\n user: ").concat(JSON.stringify(mariaConfig.user), ",\n password: ").concat(replaceSecretReference(mariaConfig.password), ",\n database: ").concat(JSON.stringify(mariaConfig.database), ",\n ssl: ").concat(mariaConfig.ssl || false).concat(mariaConfig.sslConfig
+ ? ",\n sslConfig: {\n ".concat(mariaConfig.sslConfig.ca ? "ca: ".concat(replaceSecretReference(mariaConfig.sslConfig.ca), ",") : '', "\n ").concat(mariaConfig.sslConfig.cert
+ ? "cert: ".concat(replaceSecretReference(mariaConfig.sslConfig.cert), ",")
+ : '', "\n ").concat(mariaConfig.sslConfig.key
+ ? "key: ".concat(replaceSecretReference(mariaConfig.sslConfig.key), ",")
+ : '', "\n rejectUnauthorized: ").concat(mariaConfig.sslConfig.rejectUnauthorized !== false, "\n }")
+ : '', "\n })\n \n const connection = await pool.getConnection()\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaRows = await connection.query(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [").concat(JSON.stringify(database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => `CAST(\\`${col}\\` AS CHAR) LIKE ?`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => '?').join(', ')\n queryParams.push(...value)\n conditions.push(`\\`${key}\\` IN (${placeholders})`)\n } else {\n conditions.push(`\\`${key}\\` = ?`)\n queryParams.push(value)\n }\n })\n }\n \n let sql = `SELECT * FROM \\`").concat(tableName, "\\``\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY \\`${sortBy}\\` ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const rows = await connection.query(sql, queryParams)\n const rowArray = Array.isArray(rows) ? rows : []\n const plainRows = rowArray.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n connection.release()\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('MariaDB fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (pool) {\n await pool.end()\n }\n }\n}\n");
+ };
+ export var generateMariaDBCountFetcher = function (config, tableName) {
+ var mariaConfig = config;
+ var database = mariaConfig.database;
+ return "\nasync function getCount(req, res) {\n const connection = getConnection()\n\n try {\n const { query, queryColumns, filters } = req.query\n const conditions = []\n const queryParams = []\n\n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaRows = await connection.query(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [".concat(JSON.stringify(database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map(col => `CAST(${col} AS CHAR) LIKE ?`).join(' OR ')\n conditions.push(`(${searchConditions})`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n }\n }\n\n if (filters) {\n const parsedFilters = JSON.parse(filters)\n for (const filter of parsedFilters) {\n conditions.push(`${filter.column} ${filter.operator} ?`)\n queryParams.push(filter.value)\n }\n }\n\n let countSql = `SELECT COUNT(*) as count FROM ").concat(tableName, "`\n if (conditions.length > 0) {\n countSql += ` WHERE ${conditions.join(' AND ')}`\n }\n\n const [rows] = await connection.execute(countSql, queryParams)\n const count = rows[0].count\n\n return res.status(200).json({\n success: true,\n count: count,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Error getting count:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to get count',\n timestamp: Date.now()\n })\n }\n}\n");
+ };
+ //# sourceMappingURL=mariadb.js.map
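Reviewer note: the generated handler assembles its search WHERE-clause as one CAST(...) LIKE ? condition per column, pushing a %query% parameter for each. A standalone reconstruction of that step (the function name is illustrative; the real logic lives inside the emitted route string):

// Each searchable column becomes a CAST(...) LIKE ? condition,
// with one %query% parameter pushed per column.
const buildSearchClause = (columns: string[], query: string) => {
  const conditions = columns.map((col) => `CAST(\`${col}\` AS CHAR) LIKE ?`)
  const params = columns.map(() => `%${query}%`)
  return { clause: `(${conditions.join(' OR ')})`, params }
}

buildSearchClause(['name', 'email'], 'ada')
// => {
//   clause: '(CAST(`name` AS CHAR) LIKE ? OR CAST(`email` AS CHAR) LIKE ?)',
//   params: ['%ada%', '%ada%'],
// }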
package/dist/esm/fetchers/mariadb.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"mariadb.js","sourceRoot":"","sources":["../../../src/fetchers/mariadb.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,sBAAsB,EAAE,MAAM,UAAU,CAAA;AAajD,MAAM,CAAC,IAAM,sBAAsB,GAAG,UACpC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAuB,CAAA;IAC3C,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IAErC,OAAO,uKAMK,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,4BAChC,WAAW,CAAC,IAAI,IAAI,IAAI,4BACxB,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,gCAC5B,sBAAsB,CAAC,WAAW,CAAC,QAAQ,CAAC,gCAC5C,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,QAAQ,CAAC,2BACzC,WAAW,CAAC,GAAG,IAAI,KAAK,SACjC,WAAW,CAAC,SAAS;QACnB,CAAC,CAAC,yCAGE,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,sBAAsB,CAAC,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,uBAG1F,WAAW,CAAC,SAAS,CAAC,IAAI;YACxB,CAAC,CAAC,gBAAS,sBAAsB,CAAC,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,MAAG;YAChE,CAAC,CAAC,EAAE,uBAGN,WAAW,CAAC,SAAS,CAAC,GAAG;YACvB,CAAC,CAAC,eAAQ,sBAAsB,CAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,MAAG;YAC9D,CAAC,CAAC,EAAE,2CAEc,WAAW,CAAC,SAAS,CAAC,kBAAkB,KAAK,KAAK,cACxE;QACF,CAAC,CAAC,EAAE,4tBAuBK,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,kkCA8BjC,SAAS,8xCA+C5C,CAAA;AACD,CAAC,CAAA;AAED,MAAM,CAAC,IAAM,2BAA2B,GAAG,UACzC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAuB,CAAA;IAC3C,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IAErC,OAAO,sxBAsBM,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,+1BAwBhB,SAAS,yiBAsB7D,CAAA;AACD,CAAC,CAAA"}
package/dist/esm/fetchers/mongodb.d.ts
@@ -0,0 +1,7 @@
+ export declare const validateMongoDBConfig: (config: Record<string, unknown>) => {
+ isValid: boolean;
+ error?: string;
+ };
+ export declare const generateMongoDBFetcher: (config: Record<string, unknown>, tableName: string) => string;
+ export declare const generateMongoDBCountFetcher: (_config: any, tableName: string) => string;
+ //# sourceMappingURL=mongodb.d.ts.map
package/dist/esm/fetchers/mongodb.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"mongodb.d.ts","sourceRoot":"","sources":["../../../src/fetchers/mongodb.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,qBAAqB,WACxB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAoCpC,CAAA;AAWD,eAAO,MAAM,sBAAsB,WACzB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAwHF,CAAA;AAGD,eAAO,MAAM,2BAA2B,YAAa,GAAG,aAAa,MAAM,KAAG,MA6D7E,CAAA"}
package/dist/esm/fetchers/mongodb.js
@@ -0,0 +1,46 @@
+ import { replaceSecretReference } from '../utils';
+ export var validateMongoDBConfig = function (config) {
+ if (!config || typeof config !== 'object') {
+ return { isValid: false, error: 'Config must be a valid object' };
+ }
+ // If connectionString is provided, validate it
+ if (config.connectionString) {
+ if (typeof config.connectionString !== 'string' || config.connectionString.trim() === '') {
+ return { isValid: false, error: 'Connection string must be a non-empty string' };
+ }
+ // Only validate format if it's not a secret reference that will be resolved at runtime
+ var connStr = config.connectionString;
+ if (!connStr.startsWith('teleporthq.secrets.') &&
+ !connStr.startsWith('mongodb://') &&
+ !connStr.startsWith('mongodb+srv://')) {
+ return { isValid: false, error: 'Invalid MongoDB connection string format' };
+ }
+ return { isValid: true };
+ }
+ // If no connectionString, host/port/database/etc will be used to build one
+ // Make host optional - if neither connectionString nor host is provided,
+ // the generator will need to handle it
+ if (config.host !== undefined && typeof config.host !== 'string') {
+ return { isValid: false, error: 'Host must be a string' };
+ }
+ if (!config.database || typeof config.database !== 'string') {
+ return { isValid: false, error: 'Database name is required' };
+ }
+ return { isValid: true };
+ };
+ export var generateMongoDBFetcher = function (config, tableName) {
+ var mongoConfig = config;
+ var hasUsername = mongoConfig === null || mongoConfig === void 0 ? void 0 : mongoConfig.username;
+ var database = mongoConfig === null || mongoConfig === void 0 ? void 0 : mongoConfig.database;
+ // Build connection string from parts if not provided
+ var connectionString = mongoConfig.connectionString;
+ if (!connectionString) {
+ connectionString = "mongodb://".concat(hasUsername ? "".concat(mongoConfig.username, ":").concat(mongoConfig.password, "@") : '').concat(mongoConfig.host, ":").concat(mongoConfig.port || 27017, "/").concat(database);
+ }
+ return "import { MongoClient, ObjectId } from 'mongodb'\n\nexport default async function handler(req, res) {\n let client = null\n try {\n const url = ".concat(replaceSecretReference(connectionString), "\n client = new MongoClient(url, {\n connectTimeoutMS: 30000,\n serverSelectionTimeoutMS: 30000\n })\n \n await client.connect()\n const db = client.db(").concat(JSON.stringify(database), ")\n const collection = db.collection('").concat(tableName, "')\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const filter = {}\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all field names from a sample document\n try {\n const sampleDoc = await db.collection(").concat(JSON.stringify(tableName), ").findOne({})\n if (sampleDoc) {\n columns = Object.keys(sampleDoc).filter(key => key !== '_id')\n }\n } catch (schemaError) {\n console.warn('Failed to fetch sample document for column names:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const orConditions = columns.map((col) => ({\n [col]: { $regex: query, $options: 'i' }\n }))\n filter.$or = orConditions\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (key === '_id') {\n if (Array.isArray(value)) {\n filter[key] = {\n $in: value.map((id) => (typeof id === 'string' ? new ObjectId(id) : id))\n }\n } else if (typeof value === 'string') {\n filter[key] = new ObjectId(value)\n } else {\n filter[key] = value\n }\n } else if (Array.isArray(value)) {\n filter[key] = { $in: value }\n } else {\n filter[key] = value\n }\n })\n }\n \n let cursor = collection.find(filter)\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n cursor = cursor.sort({ [sortBy]: sortOrderValue })\n }\n \n const limitValue = limit || perPage\n const skipValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (skipValue !== undefined) {\n cursor = cursor.skip(skipValue)\n }\n \n if (limitValue) {\n cursor = cursor.limit(parseInt(limitValue))\n }\n \n const documents = await cursor.toArray()\n const safeData = JSON.parse(JSON.stringify(documents))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('MongoDB fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n await client.close()\n }\n }\n}\n");
+ };
+ // tslint:disable-next-line:variable-name
+ export var generateMongoDBCountFetcher = function (_config, tableName) {
+ return "\nasync function getCount(req, res) {\n const client = getClient()\n const db = client.db()\n\n try {\n const { query, queryColumns, filters } = req.query\n const collection = db.collection('".concat(tableName, "')\n const filter = {}\n\n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n } else {\n // Fallback: Get all field names from a sample document\n try {\n const sampleDoc = await collection.findOne({})\n if (sampleDoc) {\n columns = Object.keys(sampleDoc).filter(key => key !== '_id')\n }\n } catch (schemaError) {\n console.warn('Failed to fetch sample document for column names:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n filter.$or = columns.map(col => ({\n [col]: { $regex: query, $options: 'i' }\n }))\n }\n }\n\n if (filters) {\n const parsedFilters = JSON.parse(filters)\n for (const f of parsedFilters) {\n filter[f.column] = f.value\n }\n }\n\n const count = await collection.countDocuments(filter)\n\n return res.status(200).json({\n success: true,\n count: count,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Error getting count:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to get count',\n timestamp: Date.now()\n })\n }\n}\n");
+ };
+ //# sourceMappingURL=mongodb.js.map
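The generated MongoDB handler reads all of its options from `req.query`: `query`/`queryColumns` drive a case-insensitive `$regex` search, `filters` is an object map whose `_id` entries are converted to `ObjectId`, and pagination comes from either `offset` or `page`/`perPage` (skip = (page - 1) * perPage). Note that the count fetcher instead expects `filters` as an array of `{ column, value }` pairs. A minimal client-side sketch, assuming a generated route mounted at the hypothetical path `/api/users`:

```ts
// Hypothetical call against a route produced by generateMongoDBFetcher.
// The path and field names are illustrative, not part of the package.
const params = new URLSearchParams({
  query: 'ada',                                   // matched with $regex, $options: 'i'
  queryColumns: JSON.stringify(['name', 'email']),
  filters: JSON.stringify({ _id: '507f1f77bcf86cd799439011' }), // string _id → ObjectId
  page: '2',
  perPage: '10',                                  // skip = (2 - 1) * 10 = 10
  sortBy: 'createdAt',
  sortOrder: 'desc',
})
const res = await fetch(`/api/users?${params.toString()}`)
const { success, data, timestamp } = await res.json()
```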
@@ -0,0 +1 @@
+ {"version":3,"file":"mongodb.js","sourceRoot":"","sources":["../../../src/fetchers/mongodb.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,sBAAsB,EAAE,MAAM,UAAU,CAAA;AAEjD,MAAM,CAAC,IAAM,qBAAqB,GAAG,UACnC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,+CAA+C;IAC/C,IAAI,MAAM,CAAC,gBAAgB,EAAE;QAC3B,IAAI,OAAO,MAAM,CAAC,gBAAgB,KAAK,QAAQ,IAAI,MAAM,CAAC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;YACxF,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,8CAA8C,EAAE,CAAA;SACjF;QAED,uFAAuF;QACvF,IAAM,OAAO,GAAG,MAAM,CAAC,gBAA0B,CAAA;QACjD,IACE,CAAC,OAAO,CAAC,UAAU,CAAC,qBAAqB,CAAC;YAC1C,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,CAAC;YACjC,CAAC,OAAO,CAAC,UAAU,CAAC,gBAAgB,CAAC,EACrC;YACA,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,0CAA0C,EAAE,CAAA;SAC7E;QAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;KACzB;IAED,2EAA2E;IAC3E,yEAAyE;IACzE,uCAAuC;IACvC,IAAI,MAAM,CAAC,IAAI,KAAK,SAAS,IAAI,OAAO,MAAM,CAAC,IAAI,KAAK,QAAQ,EAAE;QAChE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,uBAAuB,EAAE,CAAA;KAC1D;IAED,IAAI,CAAC,MAAM,CAAC,QAAQ,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QAC3D,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,2BAA2B,EAAE,CAAA;KAC9D;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAWD,MAAM,CAAC,IAAM,sBAAsB,GAAG,UACpC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAuB,CAAA;IAC3C,IAAM,WAAW,GAAG,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,QAAQ,CAAA;IACzC,IAAM,QAAQ,GAAG,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,QAAQ,CAAA;IAEtC,qDAAqD;IACrD,IAAI,gBAAgB,GAAG,WAAW,CAAC,gBAAgB,CAAA;IACnD,IAAI,CAAC,gBAAgB,EAAE;QACrB,gBAAgB,GAAG,oBACjB,WAAW,CAAC,CAAC,CAAC,UAAG,WAAW,CAAC,QAAQ,cAAI,WAAW,CAAC,QAAQ,MAAG,CAAC,CAAC,CAAC,EAAE,SACpE,WAAW,CAAC,IAAI,cAAI,WAAW,CAAC,IAAI,IAAI,KAAK,cAAI,QAAQ,CAAE,CAAA;KAC/D;IAED,OAAO,8JAKS,sBAAsB,CAAC,gBAAgB,CAAC,8LAO/B,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,sDACX,SAAS,8cAeC,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,uxEA8E1E,CAAA;AACD,CAAC,CAAA;AAED,yCAAyC;AACzC,MAAM,CAAC,IAAM,2BAA2B,GAAG,UAAC,OAAY,EAAE,SAAiB;IACzE,OAAO,kNAO+B,SAAS,i+CAoDhD,CAAA;AACD,CAAC,CAAA"}
@@ -0,0 +1,3 @@
+ export declare const generateMySQLFetcher: (config: Record<string, unknown>, tableName: string) => string;
+ export declare const generateMySQLCountFetcher: (config: Record<string, unknown>, tableName: string) => string;
+ //# sourceMappingURL=mysql.d.ts.map
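As the declarations show, both MySQL generators are pure string builders: they take a loosely typed config record and a table name and return the source of a Next.js API route. A sketch of how they might be invoked, with illustrative config values (the `teleporthq.secrets.*` reference mirrors the `replaceSecretReference` convention used throughout the fetchers):

```ts
import { generateMySQLFetcher, generateMySQLCountFetcher } from './mysql'

// Illustrative values only; the keys mirror the ones read in mysql.js below.
const config = {
  host: 'db.example.com',
  port: 3306,
  user: 'reader',
  password: 'teleporthq.secrets.MYSQL_PASSWORD', // resolved by replaceSecretReference
  database: 'app',
  ssl: true,
}

const fetcherSource = generateMySQLFetcher(config, 'users')
const countSource = generateMySQLCountFetcher(config, 'users')
// Both return strings of JavaScript that the plugin writes out as API routes.
```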
@@ -0,0 +1 @@
+ {"version":3,"file":"mysql.d.ts","sourceRoot":"","sources":["../../../src/fetchers/mysql.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,oBAAoB,WACvB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MA+IF,CAAA;AAED,eAAO,MAAM,yBAAyB,WAC5B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAwEF,CAAA"}
@@ -0,0 +1,25 @@
+ import { replaceSecretReference } from '../utils';
+ export var generateMySQLFetcher = function (config, tableName) {
+ var mysqlConfig = config;
+ var resolvedUser = mysqlConfig.user || mysqlConfig.username || null;
+ var hasCustomSSLConfig = !!mysqlConfig.sslConfig;
+ var defaultSSLEnabled = mysqlConfig.ssl !== false;
+ var database = mysqlConfig.database;
+ var sslConfigString = hasCustomSSLConfig
+ ? "{\n ".concat(mysqlConfig.sslConfig.ca ? "ca: ".concat(replaceSecretReference(mysqlConfig.sslConfig.ca), ",") : '', "\n ").concat(mysqlConfig.sslConfig.cert
+ ? "cert: ".concat(replaceSecretReference(mysqlConfig.sslConfig.cert), ",")
+ : '', "\n ").concat(mysqlConfig.sslConfig.key
+ ? "key: ".concat(replaceSecretReference(mysqlConfig.sslConfig.key), ",")
+ : '', "\n rejectUnauthorized: ").concat(mysqlConfig.sslConfig.rejectUnauthorized !== undefined
+ ? mysqlConfig.sslConfig.rejectUnauthorized
+ : true, "\n }")
+ : defaultSSLEnabled
+ ? "{ rejectUnauthorized: true }"
+ : 'false';
+ return "import mysql from 'mysql2/promise'\n\nlet pool = null\n\nconst getPool = () => {\n if (pool) return pool\n \n pool = mysql.createPool({\n host: ".concat(JSON.stringify(mysqlConfig.host), ",\n port: ").concat(mysqlConfig.port || 3306, ",\n user: ").concat(resolvedUser !== null ? JSON.stringify(resolvedUser) : 'undefined', ",\n password: ").concat(replaceSecretReference(mysqlConfig.password), ",\n database: ").concat(JSON.stringify(mysqlConfig.database), ",\n ssl: ").concat(sslConfigString, "\n })\n \n return pool\n}\n\nexport default async function handler(req, res) {\n try {\n const pool = getPool()\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const [schemaRows] = await pool.promise().query(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [").concat(JSON.stringify(database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => `CAST(${mysql.escapeId(col)} AS CHAR) LIKE ?`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => '?').join(', ')\n queryParams.push(...value)\n conditions.push(`${mysql.escapeId(key)} IN (${placeholders})`)\n } else {\n conditions.push(`${mysql.escapeId(key)} = ?`)\n queryParams.push(value)\n }\n })\n }\n \n let sql = `SELECT * FROM ${mysql.escapeId('").concat(tableName, "')}`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${mysql.escapeId(sortBy)} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const [rows] = await pool.query(sql, queryParams)\n const rowArray = Array.isArray(rows) ? rows : []\n const plainRows = rowArray.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('MySQL fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
+ };
+ export var generateMySQLCountFetcher = function (config, tableName) {
+ var mysqlConfig = config;
+ return "\nasync function getCount(req, res) {\n const connection = getConnection()\n\n try {\n const { query, queryColumns, filters } = req.query\n const conditions = []\n const queryParams = []\n\n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const [schemaRows] = await connection.execute(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [".concat(JSON.stringify(mysqlConfig.database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map(col => `CAST(${col} AS CHAR) LIKE ?`).join(' OR ')\n conditions.push(`(${searchConditions})`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n }\n }\n\n if (filters) {\n const parsedFilters = JSON.parse(filters)\n for (const filter of parsedFilters) {\n conditions.push(`${filter.column} ${filter.operator} ?`)\n queryParams.push(filter.value)\n }\n }\n\n let countSql = `SELECT COUNT(*) as count FROM ").concat(tableName, "`\n if (conditions.length > 0) {\n countSql += ` WHERE ${conditions.join(' AND ')}`\n }\n\n const [rows] = await connection.execute(countSql, queryParams)\n const count = rows[0].count\n\n return res.status(200).json({\n success: true,\n count: count,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Error getting count:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to get count',\n timestamp: Date.now()\n })\n }\n}\n");
+ };
+ //# sourceMappingURL=mysql.js.map
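One subtlety in the fetcher above: the information_schema fallback calls `pool.promise().query(...)`, but the pool is created from `mysql2/promise`, whose pools are already promise-based; `.promise()` is the bridge exposed by the callback-style `mysql2` import. A minimal sketch of the same column lookup against a promise pool, under that assumption (helper name is hypothetical):

```ts
import type { Pool } from 'mysql2/promise'

// Hypothetical helper: look up the column names of a table so that a free-text
// search can be expanded across every column.
async function lookupColumns(pool: Pool, database: string, table: string): Promise<string[]> {
  // On a mysql2/promise pool, query() itself returns a promise;
  // .promise() only exists on pools from the callback-style 'mysql2' import.
  const [rows] = await pool.query(
    `SELECT COLUMN_NAME FROM information_schema.COLUMNS
     WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ?
     ORDER BY ORDINAL_POSITION`,
    [database, table]
  )
  return (rows as Array<{ COLUMN_NAME: string }>).map((row) => row.COLUMN_NAME)
}
```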
@@ -0,0 +1 @@
+ {"version":3,"file":"mysql.js","sourceRoot":"","sources":["../../../src/fetchers/mysql.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,sBAAsB,EAAE,MAAM,UAAU,CAAA;AAajD,MAAM,CAAC,IAAM,oBAAoB,GAAG,UAClC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAqB,CAAA;IACzC,IAAM,YAAY,GAAG,WAAW,CAAC,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,IAAI,CAAA;IACrE,IAAM,kBAAkB,GAAG,CAAC,CAAC,WAAW,CAAC,SAAS,CAAA;IAClD,IAAM,iBAAiB,GAAG,WAAW,CAAC,GAAG,KAAK,KAAK,CAAA;IACnD,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IAErC,IAAM,eAAe,GAAG,kBAAkB;QACxC,CAAC,CAAC,mBACE,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,sBAAsB,CAAC,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBAE1F,WAAW,CAAC,SAAS,CAAC,IAAI;YACxB,CAAC,CAAC,gBAAS,sBAAsB,CAAC,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,MAAG;YAChE,CAAC,CAAC,EAAE,qBAGN,WAAW,CAAC,SAAS,CAAC,GAAG;YACvB,CAAC,CAAC,eAAQ,sBAAsB,CAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,MAAG;YAC9D,CAAC,CAAC,EAAE,yCAGN,WAAW,CAAC,SAAS,CAAC,kBAAkB,KAAK,SAAS;YACpD,CAAC,CAAC,WAAW,CAAC,SAAS,CAAC,kBAAkB;YAC1C,CAAC,CAAC,IAAI,YAEV;QACF,CAAC,CAAC,iBAAiB;YACnB,CAAC,CAAC,8BAA8B;YAChC,CAAC,CAAC,OAAO,CAAA;IAEX,OAAO,gKAQG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,0BAChC,WAAW,CAAC,IAAI,IAAI,IAAI,0BACxB,YAAY,KAAK,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,WAAW,8BAC9D,sBAAsB,CAAC,WAAW,CAAC,QAAQ,CAAC,8BAC5C,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,QAAQ,CAAC,yBACzC,eAAe,uxBA2BX,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,+mCA8BlB,SAAS,qsCA0C3D,CAAA;AACD,CAAC,CAAA;AAED,MAAM,CAAC,IAAM,yBAAyB,GAAG,UACvC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAqB,CAAA;IAEzC,OAAO,0xBAsBM,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,+1BAwB5B,SAAS,yiBAsB7D,CAAA;AACD,CAAC,CAAA"}
@@ -0,0 +1,3 @@
+ export declare const generatePostgreSQLFetcher: (config: Record<string, unknown>, tableName: string) => string;
+ export declare const generatePostgreSQLCountFetcher: (config: Record<string, unknown>, tableName: string) => string;
+ //# sourceMappingURL=postgresql.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"postgresql.d.ts","sourceRoot":"","sources":["../../../src/fetchers/postgresql.ts"],"names":[],"mappings":"AAcA,eAAO,MAAM,yBAAyB,WAC5B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MA8IF,CAAA;AAED,eAAO,MAAM,8BAA8B,WACjC,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAkFF,CAAA"}
@@ -0,0 +1,20 @@
+ import { replaceSecretReference } from '../utils';
+ export var generatePostgreSQLFetcher = function (config, tableName) {
+ var _a;
+ var pgConfig = config;
+ var schema = (_a = pgConfig.options) === null || _a === void 0 ? void 0 : _a.schema;
+ return "import { Pool } from 'pg'\n\nlet pool = null\n\nconst getPool = () => {\n if (pool) return pool\n \n pool = new Pool({\n host: ".concat(JSON.stringify(pgConfig.host), ",\n port: ").concat(pgConfig.port || 5432, ",\n user: ").concat(JSON.stringify(pgConfig.user || pgConfig.username), ",\n password: ").concat(replaceSecretReference(pgConfig.password), ",\n database: ").concat(JSON.stringify(pgConfig.database), ",\n ssl: ").concat(pgConfig.ssl === false
+ ? 'false'
+ : pgConfig.sslConfig
+ ? "{\n ".concat(pgConfig.sslConfig.ca ? "ca: ".concat(replaceSecretReference(pgConfig.sslConfig.ca), ",") : '', "\n ").concat(pgConfig.sslConfig.cert ? "cert: ".concat(replaceSecretReference(pgConfig.sslConfig.cert), ",") : '', "\n ").concat(pgConfig.sslConfig.key ? "key: ".concat(replaceSecretReference(pgConfig.sslConfig.key), ",") : '', "\n rejectUnauthorized: false\n }")
+ : '{ rejectUnauthorized: false }', "\n })\n \n return pool\n}\n\nexport default async function handler(req, res) {\n try {\n const pool = getPool()\n ").concat(schema ? "await pool.query('SET search_path TO ".concat(schema, "')") : '', "\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n let paramIndex = 1\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaQuery = `\n SELECT column_name \n FROM information_schema.columns \n WHERE table_name = $1\n ").concat(schema ? "AND table_schema = $2" : '', "\n ORDER BY ordinal_position\n `\n const schemaParams = schema \n ? [").concat(JSON.stringify(tableName), ", ").concat(JSON.stringify(schema), "]\n : [").concat(JSON.stringify(tableName), "]\n \n const schemaResult = await pool.query(schemaQuery, schemaParams)\n columns = schemaResult.rows.map(row => row.column_name)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => {\n const condition = `${col}::text ILIKE $${paramIndex}`\n paramIndex++\n return condition\n })\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => `$${paramIndex++}`)\n queryParams.push(...value)\n conditions.push(`${key} IN (${placeholders.join(', ')})`)\n } else {\n conditions.push(`${key} = $${paramIndex}`)\n queryParams.push(value)\n paramIndex++\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await pool.query(sql, queryParams)\n const rows = Array.isArray(result?.rows) ? result.rows : []\n const plainRows = rows.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('PostgreSQL fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
+ };
+ export var generatePostgreSQLCountFetcher = function (config, tableName) {
+ var _a;
+ var pgConfig = config;
+ var hasSchema = !!((_a = pgConfig.options) === null || _a === void 0 ? void 0 : _a.schema);
+ return "\nasync function getCount(req, res) {\n const pool = getPool()\n\n try {\n const { query, queryColumns, filters } = req.query\n const conditions = []\n const queryParams = []\n let paramIndex = 1\n\n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaQuery = `\n SELECT column_name \n FROM information_schema.columns \n WHERE table_name = $1\n ".concat(hasSchema ? "AND table_schema = $2" : '', "\n ORDER BY ordinal_position\n `\n const schemaParams = ").concat(hasSchema
+ ? "[".concat(JSON.stringify(tableName), ", ").concat(JSON.stringify(pgConfig.options.schema), "]")
+ : "[".concat(JSON.stringify(tableName), "]"), "\n \n const schemaResult = await pool.query(schemaQuery, schemaParams)\n columns = schemaResult.rows.map(row => row.column_name)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map(col => `${col}::text ILIKE $${paramIndex++}`).join(' OR ')\n conditions.push(`(${searchConditions})`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n }\n }\n\n if (filters) {\n const parsedFilters = JSON.parse(filters)\n for (const filter of parsedFilters) {\n conditions.push(`${filter.column} ${filter.operator} $${paramIndex++}`)\n queryParams.push(filter.value)\n }\n }\n\n let countSql = `SELECT COUNT(*) FROM ").concat(tableName, "`\n if (conditions.length > 0) {\n countSql += ` WHERE ${conditions.join(' AND ')}`\n }\n\n const result = await pool.query(countSql, queryParams)\n const count = parseInt(result.rows[0].count, 10)\n\n return res.status(200).json({\n success: true,\n count: count,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Error getting count:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to get count',\n timestamp: Date.now()\n })\n }\n}\n");
+ };
+ //# sourceMappingURL=postgresql.js.map
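The PostgreSQL handlers number their placeholders manually: each searchable column consumes one `$n` placeholder and pushes one matching `%query%` parameter, and the running `paramIndex` is then reused for filter conditions so indexes and parameters stay aligned. The bookkeeping, extracted into a standalone sketch (the helper name is illustrative):

```ts
// Sketch of the placeholder bookkeeping used by the generated PostgreSQL
// handlers: every searchable column consumes one $n placeholder and one
// matching `%query%` parameter.
function buildSearchClause(columns: string[], query: string, startIndex = 1) {
  let paramIndex = startIndex
  const conditions = columns.map((col) => `${col}::text ILIKE $${paramIndex++}`)
  const params = columns.map(() => `%${query}%`)
  return { clause: `(${conditions.join(' OR ')})`, params, nextIndex: paramIndex }
}

// buildSearchClause(['name', 'email'], 'ada')
// → { clause: "(name::text ILIKE $1 OR email::text ILIKE $2)",
//     params: ['%ada%', '%ada%'], nextIndex: 3 }
```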
@@ -0,0 +1 @@
+ {"version":3,"file":"postgresql.js","sourceRoot":"","sources":["../../../src/fetchers/postgresql.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,sBAAsB,EAAE,MAAM,UAAU,CAAA;AAcjD,MAAM,CAAC,IAAM,yBAAyB,GAAG,UACvC,MAA+B,EAC/B,SAAiB;;IAEjB,IAAM,QAAQ,GAAG,MAA0B,CAAA;IAC3C,IAAM,MAAM,GAAG,MAAA,QAAQ,CAAC,OAAO,0CAAE,MAAM,CAAA;IAEvC,OAAO,+IAQG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,0BAC7B,QAAQ,CAAC,IAAI,IAAI,IAAI,0BACrB,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,IAAI,QAAQ,CAAC,QAAQ,CAAC,8BAC9C,sBAAsB,CAAC,QAAQ,CAAC,QAAQ,CAAC,8BACzC,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,yBAE3C,QAAQ,CAAC,GAAG,KAAK,KAAK;QACpB,CAAC,CAAC,OAAO;QACT,CAAC,CAAC,QAAQ,CAAC,SAAS;YACpB,CAAC,CAAC,mBACF,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,sBAAsB,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBACpF,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,gBAAS,sBAAsB,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBAC1F,QAAQ,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,eAAQ,sBAAsB,CAAC,QAAQ,CAAC,SAAS,CAAC,GAAG,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,6CAEzF;YACE,CAAC,CAAC,+BAA+B,yIAUnC,MAAM,CAAC,CAAC,CAAC,+CAAwC,MAAM,OAAI,CAAC,CAAC,CAAC,EAAE,gnBAqBxD,MAAM,CAAC,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,EAAE,0HAIlC,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,+BACpD,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,uxCAoCV,SAAS,8rCA0CxC,CAAA;AACD,CAAC,CAAA;AAED,MAAM,CAAC,IAAM,8BAA8B,GAAG,UAC5C,MAA+B,EAC/B,SAAiB;;IAEjB,IAAM,QAAQ,GAAG,MAA0B,CAAA;IAC3C,IAAM,SAAS,GAAG,CAAC,CAAC,CAAA,MAAA,QAAQ,CAAC,OAAO,0CAAE,MAAM,CAAA,CAAA;IAE5C,OAAO,ytBAuBK,SAAS,CAAC,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,EAAE,kGAI1C,SAAS;QACP,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAQ,CAAC,MAAM,CAAC,MAAG;QAC/E,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,MAAG,87BA0BJ,SAAS,sjBAsBpD,CAAA;AACD,CAAC,CAAA"}
@@ -0,0 +1,6 @@
+ export declare const validateRedisConfig: (config: Record<string, unknown>) => {
+ isValid: boolean;
+ error?: string;
+ };
+ export declare const generateRedisFetcher: (config: Record<string, unknown>) => string;
+ //# sourceMappingURL=redis.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"redis.d.ts","sourceRoot":"","sources":["../../../src/fetchers/redis.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,mBAAmB,WACtB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CA8BpC,CAAA;AAYD,eAAO,MAAM,oBAAoB,WAAY,OAAO,MAAM,EAAE,OAAO,CAAC,KAAG,MAyGtE,CAAA"}
@@ -0,0 +1,41 @@
+ import { replaceSecretReference } from '../utils';
+ export var validateRedisConfig = function (config) {
+ if (!config || typeof config !== 'object') {
+ return { isValid: false, error: 'Config must be a valid object' };
+ }
+ // If connectionString is provided, validate it
+ if (config.connectionString) {
+ if (typeof config.connectionString !== 'string' || config.connectionString.trim() === '') {
+ return { isValid: false, error: 'Connection string must be a non-empty string' };
+ }
+ // Only validate format if it's not a secret reference that will be resolved at runtime
+ var connStr = config.connectionString;
+ if (!connStr.startsWith('teleporthq.secrets.') &&
+ !connStr.startsWith('redis://') &&
+ !connStr.startsWith('rediss://')) {
+ return { isValid: false, error: 'Invalid Redis connection string format' };
+ }
+ return { isValid: true };
+ }
+ // If no connectionString, host/port/etc will be used to build one
+ if (!config.host || typeof config.host !== 'string') {
+ return { isValid: false, error: 'Redis host is required when connectionString is not provided' };
+ }
+ return { isValid: true };
+ };
+ export var generateRedisFetcher = function (config) {
+ var redisConfig = config;
+ var host = redisConfig.host;
+ var port = redisConfig.port;
+ var username = redisConfig.username;
+ var password = redisConfig.password;
+ var database = redisConfig.database;
+ var hasUsername = username;
+ // Build connection string from parts if not provided
+ var connectionString = redisConfig.connectionString;
+ if (!connectionString) {
+ connectionString = "redis://".concat(hasUsername ? "".concat(username, ":").concat(password, "@") : '').concat(host, ":").concat(port || 6379);
+ }
+ return "import { createClient } from 'redis'\n\nexport default async function handler(req, res) {\n let client = null\n try {\n client = createClient({\n url: ".concat(replaceSecretReference(connectionString)).concat(database ? ",\n database: ".concat(database) : '', "\n })\n \n await client.connect()\n \n const { query, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const pattern = (filters && JSON.parse(filters).pattern) || query || '*'\n const keys = await client.keys(pattern)\n \n const limitValue = limit || perPage || 100\n const skipValue = offset !== undefined ? parseInt(offset) : ((parseInt(page) || 1) - 1) * parseInt(limitValue)\n const paginatedKeys = keys.slice(skipValue, skipValue + parseInt(limitValue))\n \n const results = []\n for (const key of paginatedKeys) {\n const type = await client.type(key)\n const ttl = await client.ttl(key)\n let value\n \n switch (type) {\n case 'string':\n value = await client.get(key)\n break\n case 'list':\n value = await client.lRange(key, 0, -1)\n break\n case 'set':\n value = await client.sMembers(key)\n break\n case 'zset':\n value = await client.zRange(key, 0, -1)\n break\n case 'hash':\n value = await client.hGetAll(key)\n break\n default:\n value = null\n }\n \n results.push({\n key,\n type,\n value,\n ttl: ttl === -1 ? null : ttl\n })\n }\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n results.sort((a, b) => {\n const aVal = a[sortBy]\n const bVal = b[sortBy]\n if (aVal < bVal) return -sortOrderValue\n if (aVal > bVal) return sortOrderValue\n return 0\n })\n }\n \n const safeData = JSON.parse(JSON.stringify(results))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Redis fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n await client.quit()\n }\n }\n}\n");
+ };
+ //# sourceMappingURL=redis.js.map
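`validateRedisConfig` accepts either a full connection string (`redis://`, `rediss://`, or a `teleporthq.secrets.*` reference resolved at runtime) or discrete host details from which `generateRedisFetcher` assembles a URL. Note also that the generated route lists keys with `KEYS` and paginates in memory, which is fine for small keyspaces but scans the whole database on each request. The validator's outcomes, traced directly from the code above:

```ts
import { validateRedisConfig } from './redis'

// A full connection string is validated by scheme (or deferred if it is a
// secret reference); otherwise a host is required.
validateRedisConfig({ connectionString: 'rediss://cache.example.com:6380' })
// → { isValid: true }
validateRedisConfig({ connectionString: 'http://nope' })
// → { isValid: false, error: 'Invalid Redis connection string format' }
validateRedisConfig({ host: 'cache.example.com', port: 6379 })
// → { isValid: true }
validateRedisConfig({})
// → { isValid: false, error: 'Redis host is required when connectionString is not provided' }
```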
@@ -0,0 +1 @@
+ {"version":3,"file":"redis.js","sourceRoot":"","sources":["../../../src/fetchers/redis.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,sBAAsB,EAAE,MAAM,UAAU,CAAA;AAEjD,MAAM,CAAC,IAAM,mBAAmB,GAAG,UACjC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,+CAA+C;IAC/C,IAAI,MAAM,CAAC,gBAAgB,EAAE;QAC3B,IAAI,OAAO,MAAM,CAAC,gBAAgB,KAAK,QAAQ,IAAI,MAAM,CAAC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;YACxF,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,8CAA8C,EAAE,CAAA;SACjF;QAED,uFAAuF;QACvF,IAAM,OAAO,GAAG,MAAM,CAAC,gBAA0B,CAAA;QACjD,IACE,CAAC,OAAO,CAAC,UAAU,CAAC,qBAAqB,CAAC;YAC1C,CAAC,OAAO,CAAC,UAAU,CAAC,UAAU,CAAC;YAC/B,CAAC,OAAO,CAAC,UAAU,CAAC,WAAW,CAAC,EAChC;YACA,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,wCAAwC,EAAE,CAAA;SAC3E;QAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;KACzB;IAED,kEAAkE;IAClE,IAAI,CAAC,MAAM,CAAC,IAAI,IAAI,OAAO,MAAM,CAAC,IAAI,KAAK,QAAQ,EAAE;QACnD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,8DAA8D,EAAE,CAAA;KACjG;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAYD,MAAM,CAAC,IAAM,oBAAoB,GAAG,UAAC,MAA+B;IAClE,IAAM,WAAW,GAAG,MAAqB,CAAA;IACzC,IAAM,IAAI,GAAG,WAAW,CAAC,IAAI,CAAA;IAC7B,IAAM,IAAI,GAAG,WAAW,CAAC,IAAI,CAAA;IAC7B,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IACrC,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IACrC,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IACrC,IAAM,WAAW,GAAG,QAAQ,CAAA;IAE5B,qDAAqD;IACrD,IAAI,gBAAgB,GAAG,WAAW,CAAC,gBAAgB,CAAA;IACnD,IAAI,CAAC,gBAAgB,EAAE;QACrB,gBAAgB,GAAG,kBAAW,WAAW,CAAC,CAAC,CAAC,UAAG,QAAQ,cAAI,QAAQ,MAAG,CAAC,CAAC,CAAC,EAAE,SAAG,IAAI,cAChF,IAAI,IAAI,IAAI,CACZ,CAAA;KACH;IAED,OAAO,2KAMI,sBAAsB,CAAC,gBAAgB,CAAC,SACjD,QAAQ,CAAC,CAAC,CAAC,6BAAsB,QAAQ,CAAE,CAAC,CAAC,CAAC,EAAE,umEAgFnD,CAAA;AACD,CAAC,CAAA"}
@@ -0,0 +1,2 @@
+ export declare const generateRedshiftFetcher: (config: Record<string, unknown>, tableName: string) => string;
+ //# sourceMappingURL=redshift.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"redshift.d.ts","sourceRoot":"","sources":["../../../src/fetchers/redshift.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,uBAAuB,WAC1B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAyHF,CAAA"}
@@ -0,0 +1,20 @@
+ import { replaceSecretReference } from '../utils';
+ export var generateRedshiftFetcher = function (config, tableName) {
+ var _a;
+ var redshiftConfig = config;
+ var host = redshiftConfig.host;
+ var port = redshiftConfig.port;
+ var user = redshiftConfig.user;
+ var password = redshiftConfig.password;
+ var database = redshiftConfig.database;
+ var ssl = redshiftConfig.ssl;
+ var sslConfig = redshiftConfig.sslConfig;
+ var schema = (_a = redshiftConfig.options) === null || _a === void 0 ? void 0 : _a.schema;
+ return "import { Pool } from 'pg'\n\nlet pool = null\n\nconst getPool = () => {\n if (pool) return pool\n \n pool = new Pool({\n host: ".concat(JSON.stringify(host), ",\n port: ").concat(port || 5439, ",\n user: ").concat(JSON.stringify(user), ",\n password: ").concat(replaceSecretReference(password), ",\n database: ").concat(JSON.stringify(database), ",\n ssl: ").concat(ssl === false
+ ? '{ rejectUnauthorized: false }'
+ : sslConfig
+ ? "{\n ".concat(sslConfig.ca ? "ca: ".concat(replaceSecretReference(sslConfig.ca), ",") : '', "\n ").concat(sslConfig.cert ? "cert: ".concat(replaceSecretReference(sslConfig.cert), ",") : '', "\n ").concat(sslConfig.key ? "key: ".concat(replaceSecretReference(sslConfig.key), ",") : '', "\n rejectUnauthorized: ").concat(sslConfig.rejectUnauthorized !== false, "\n }")
+ : '{ rejectUnauthorized: false }' // Default to SSL with no cert verification for Redshift
+ , "\n })\n \n return pool\n}\n\nexport default async function handler(req, res) {\n try {\n const pool = getPool()\n ").concat(schema ? "await pool.query('SET search_path TO ".concat(schema, "')") : '', "\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n let paramIndex = 1\n \n if (query && queryColumns) {\n const columns = JSON.parse(queryColumns)\n const searchConditions = columns.map((col) => {\n const condition = `${col}::text ILIKE $${paramIndex}`\n paramIndex++\n return condition\n })\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => `$${paramIndex++}`)\n queryParams.push(...value)\n conditions.push(`${key} IN (${placeholders.join(', ')})`)\n } else {\n conditions.push(`${key} = $${paramIndex}`)\n queryParams.push(value)\n paramIndex++\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await pool.query(sql, queryParams)\n const rows = Array.isArray(result?.rows) ? result.rows : []\n const plainRows = rows.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Redshift fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
+ };
+ //# sourceMappingURL=redshift.js.map
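Redshift reuses the `pg` driver with a 5439 default port and an optional `SET search_path` for the configured schema; SSL is always configured (with `rejectUnauthorized` defaulting to false) even when `ssl` is false, unless a custom `sslConfig` overrides it. Unlike the PostgreSQL fetcher there is no information_schema fallback, so search only runs when `queryColumns` is supplied, and no count fetcher is generated. An invocation sketch with illustrative values:

```ts
import { generateRedshiftFetcher } from './redshift'

// Illustrative values; the key names mirror those read in redshift.js above.
const source = generateRedshiftFetcher(
  {
    host: 'cluster.abc123.us-east-1.redshift.amazonaws.com',
    port: 5439,
    user: 'analyst',
    password: 'teleporthq.secrets.REDSHIFT_PASSWORD',
    database: 'analytics',
    options: { schema: 'public' },
  },
  'events'
)
// `source` contains a Next.js API route that pages through SELECT results
// with LIMIT/OFFSET and $n placeholders.
```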