@teleporthq/teleport-plugin-next-data-source 0.42.8 → 0.42.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/fetchers/airtable.d.ts.map +1 -1
- package/dist/cjs/fetchers/airtable.js +2 -2
- package/dist/cjs/fetchers/airtable.js.map +1 -1
- package/dist/cjs/fetchers/clickhouse.d.ts.map +1 -1
- package/dist/cjs/fetchers/clickhouse.js +1 -1
- package/dist/cjs/fetchers/clickhouse.js.map +1 -1
- package/dist/cjs/fetchers/csv-file.d.ts.map +1 -1
- package/dist/cjs/fetchers/csv-file.js +2 -1
- package/dist/cjs/fetchers/csv-file.js.map +1 -1
- package/dist/cjs/fetchers/firestore.d.ts.map +1 -1
- package/dist/cjs/fetchers/firestore.js +1 -1
- package/dist/cjs/fetchers/firestore.js.map +1 -1
- package/dist/cjs/fetchers/google-sheets.d.ts.map +1 -1
- package/dist/cjs/fetchers/google-sheets.js +1 -1
- package/dist/cjs/fetchers/google-sheets.js.map +1 -1
- package/dist/cjs/fetchers/javascript.d.ts.map +1 -1
- package/dist/cjs/fetchers/javascript.js +2 -1
- package/dist/cjs/fetchers/javascript.js.map +1 -1
- package/dist/cjs/fetchers/mariadb.d.ts.map +1 -1
- package/dist/cjs/fetchers/mariadb.js +2 -2
- package/dist/cjs/fetchers/mariadb.js.map +1 -1
- package/dist/cjs/fetchers/mongodb.d.ts.map +1 -1
- package/dist/cjs/fetchers/mongodb.js +1 -1
- package/dist/cjs/fetchers/mongodb.js.map +1 -1
- package/dist/cjs/fetchers/mysql.d.ts.map +1 -1
- package/dist/cjs/fetchers/mysql.js +1 -1
- package/dist/cjs/fetchers/mysql.js.map +1 -1
- package/dist/cjs/fetchers/postgresql.d.ts.map +1 -1
- package/dist/cjs/fetchers/postgresql.js +1 -1
- package/dist/cjs/fetchers/postgresql.js.map +1 -1
- package/dist/cjs/fetchers/redis.d.ts.map +1 -1
- package/dist/cjs/fetchers/redis.js +1 -1
- package/dist/cjs/fetchers/redis.js.map +1 -1
- package/dist/cjs/fetchers/redshift.d.ts.map +1 -1
- package/dist/cjs/fetchers/redshift.js +2 -2
- package/dist/cjs/fetchers/redshift.js.map +1 -1
- package/dist/cjs/fetchers/rest-api.d.ts.map +1 -1
- package/dist/cjs/fetchers/rest-api.js +3 -2
- package/dist/cjs/fetchers/rest-api.js.map +1 -1
- package/dist/cjs/fetchers/static-collection.d.ts.map +1 -1
- package/dist/cjs/fetchers/static-collection.js +2 -1
- package/dist/cjs/fetchers/static-collection.js.map +1 -1
- package/dist/cjs/fetchers/supabase.d.ts.map +1 -1
- package/dist/cjs/fetchers/supabase.js +1 -1
- package/dist/cjs/fetchers/supabase.js.map +1 -1
- package/dist/cjs/fetchers/turso.d.ts.map +1 -1
- package/dist/cjs/fetchers/turso.js +1 -1
- package/dist/cjs/fetchers/turso.js.map +1 -1
- package/dist/cjs/tsconfig.tsbuildinfo +1 -1
- package/dist/cjs/utils.d.ts +1 -0
- package/dist/cjs/utils.d.ts.map +1 -1
- package/dist/cjs/utils.js +5 -1
- package/dist/cjs/utils.js.map +1 -1
- package/dist/esm/fetchers/airtable.d.ts.map +1 -1
- package/dist/esm/fetchers/airtable.js +3 -3
- package/dist/esm/fetchers/airtable.js.map +1 -1
- package/dist/esm/fetchers/clickhouse.d.ts.map +1 -1
- package/dist/esm/fetchers/clickhouse.js +2 -2
- package/dist/esm/fetchers/clickhouse.js.map +1 -1
- package/dist/esm/fetchers/csv-file.d.ts.map +1 -1
- package/dist/esm/fetchers/csv-file.js +2 -1
- package/dist/esm/fetchers/csv-file.js.map +1 -1
- package/dist/esm/fetchers/firestore.d.ts.map +1 -1
- package/dist/esm/fetchers/firestore.js +2 -2
- package/dist/esm/fetchers/firestore.js.map +1 -1
- package/dist/esm/fetchers/google-sheets.d.ts.map +1 -1
- package/dist/esm/fetchers/google-sheets.js +1 -1
- package/dist/esm/fetchers/google-sheets.js.map +1 -1
- package/dist/esm/fetchers/javascript.d.ts.map +1 -1
- package/dist/esm/fetchers/javascript.js +2 -1
- package/dist/esm/fetchers/javascript.js.map +1 -1
- package/dist/esm/fetchers/mariadb.d.ts.map +1 -1
- package/dist/esm/fetchers/mariadb.js +3 -3
- package/dist/esm/fetchers/mariadb.js.map +1 -1
- package/dist/esm/fetchers/mongodb.d.ts.map +1 -1
- package/dist/esm/fetchers/mongodb.js +2 -2
- package/dist/esm/fetchers/mongodb.js.map +1 -1
- package/dist/esm/fetchers/mysql.d.ts.map +1 -1
- package/dist/esm/fetchers/mysql.js +2 -2
- package/dist/esm/fetchers/mysql.js.map +1 -1
- package/dist/esm/fetchers/postgresql.d.ts.map +1 -1
- package/dist/esm/fetchers/postgresql.js +2 -2
- package/dist/esm/fetchers/postgresql.js.map +1 -1
- package/dist/esm/fetchers/redis.d.ts.map +1 -1
- package/dist/esm/fetchers/redis.js +2 -2
- package/dist/esm/fetchers/redis.js.map +1 -1
- package/dist/esm/fetchers/redshift.d.ts.map +1 -1
- package/dist/esm/fetchers/redshift.js +3 -3
- package/dist/esm/fetchers/redshift.js.map +1 -1
- package/dist/esm/fetchers/rest-api.d.ts.map +1 -1
- package/dist/esm/fetchers/rest-api.js +3 -2
- package/dist/esm/fetchers/rest-api.js.map +1 -1
- package/dist/esm/fetchers/static-collection.d.ts.map +1 -1
- package/dist/esm/fetchers/static-collection.js +2 -1
- package/dist/esm/fetchers/static-collection.js.map +1 -1
- package/dist/esm/fetchers/supabase.d.ts.map +1 -1
- package/dist/esm/fetchers/supabase.js +2 -2
- package/dist/esm/fetchers/supabase.js.map +1 -1
- package/dist/esm/fetchers/turso.d.ts.map +1 -1
- package/dist/esm/fetchers/turso.js +2 -2
- package/dist/esm/fetchers/turso.js.map +1 -1
- package/dist/esm/tsconfig.tsbuildinfo +1 -1
- package/dist/esm/utils.d.ts +1 -0
- package/dist/esm/utils.d.ts.map +1 -1
- package/dist/esm/utils.js +3 -0
- package/dist/esm/utils.js.map +1 -1
- package/package.json +2 -2
- package/src/fetchers/airtable.ts +4 -2
- package/src/fetchers/clickhouse.ts +4 -2
- package/src/fetchers/csv-file.ts +5 -1
- package/src/fetchers/firestore.ts +4 -2
- package/src/fetchers/google-sheets.ts +109 -9
- package/src/fetchers/javascript.ts +6 -2
- package/src/fetchers/mariadb.ts +4 -2
- package/src/fetchers/mongodb.ts +4 -2
- package/src/fetchers/mysql.ts +4 -2
- package/src/fetchers/postgresql.ts +4 -2
- package/src/fetchers/redis.ts +4 -2
- package/src/fetchers/redshift.ts +4 -2
- package/src/fetchers/rest-api.ts +5 -1
- package/src/fetchers/static-collection.ts +5 -1
- package/src/fetchers/supabase.ts +4 -2
- package/src/fetchers/turso.ts +4 -2
- package/src/utils.ts +30 -0
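
The common thread in this release: the generated Next.js API routes now serialize their responses through a date-aware `JSON.stringify` replacer. Most fetchers splice the output of a new `generateDateFormatterCode` helper from `../utils` into the generated source and pass a `dateReplacer` as the second argument of `JSON.stringify`; `package/src/utils.ts` grows by 30 lines accordingly, though its body is not shown in the hunks below. The following TypeScript sketch is therefore a hypothetical reconstruction, assuming the emitted code mirrors the `formatDateValue` logic visible in the google-sheets hunk; the names and locale choices are assumptions, not the package's actual code.

```ts
// Hypothetical sketch -- utils.ts itself is not part of the hunks below.
// The generated routes call JSON.stringify(value, dateReplacer), so the helper
// presumably returns source text that defines such a replacer.
export const generateDateFormatterCode = (): string => `
const formatDateValue = (date) => {
  const options = { year: 'numeric', month: 'short', day: 'numeric' }
  const timeOptions = { hour: '2-digit', minute: '2-digit' }
  const hasTime = date.getHours() !== 0 || date.getMinutes() !== 0 || date.getSeconds() !== 0
  return hasTime
    ? date.toLocaleString('en-US', { ...options, ...timeOptions })
    : date.toLocaleDateString('en-US', options)
}

function dateReplacer(key, value) {
  // JSON.stringify applies Date.prototype.toJSON before calling the replacer,
  // so check the original property on the holder object instead of 'value'.
  const original = this[key]
  return original instanceof Date ? formatDateValue(original) : value
}
`
```
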
package/dist/cjs/fetchers/airtable.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"airtable.d.ts","sourceRoot":"","sources":["../../../src/fetchers/airtable.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,sBAAsB,WACzB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAcpC,CAAA;AAQD,eAAO,MAAM,uBAAuB,WAC1B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,
+{"version":3,"file":"airtable.d.ts","sourceRoot":"","sources":["../../../src/fetchers/airtable.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,sBAAsB,WACzB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAcpC,CAAA;AAQD,eAAO,MAAM,uBAAuB,WAC1B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MA6HF,CAAA"}
package/dist/cjs/fetchers/airtable.js
@@ -19,9 +19,9 @@ var generateAirtableFetcher = function (config, tableName) {
 var airtableConfig = config;
 var baseId = airtableConfig.baseId;
 var personalAccessToken = airtableConfig.personalAccessToken;
-
return "import fetch from 'node-fetch'\n\nexport default async function handler(req, res) {\n try {\n const { query, view, limit, page, perPage, sortBy, sortOrder, filters, offset: offsetParam } = req.query\n \n const queryParams = new URLSearchParams()\n \n if (view) {\n queryParams.append('view', view)\n }\n \n if (sortBy) {\n queryParams.append('sort[0][field]', sortBy)\n queryParams.append('sort[0][direction]', sortOrder || 'asc')\n }\n \n const perPageValue = limit || perPage || 100\n queryParams.append('pageSize', Math.min(parseInt(perPageValue), 100).toString())\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n const conditions = Object.entries(parsedFilters).map(([field, value]) => {\n if (Array.isArray(value)) {\n const arrayConditions = value.map((v) => {\n if (typeof v === 'string') {\n return `{${field}}='${v.replace(/'/g, \"\\\\'\")}'`\n } else if (typeof v === 'number') {\n return `{${field}}=${v}`\n } else if (typeof v === 'boolean') {\n return `{${field}}=${v ? 'TRUE()' : 'FALSE()'}`\n }\n return `{${field}}='${String(v)}'`\n })\n return arrayConditions.length > 1\n ? `OR(${arrayConditions.join(',')})`\n : arrayConditions[0]\n } else if (typeof value === 'string') {\n return `{${field}}='${value.replace(/'/g, \"\\\\'\")}'`\n } else if (typeof value === 'number') {\n return `{${field}}=${value}`\n } else if (typeof value === 'boolean') {\n return `{${field}}=${value ? 'TRUE()' : 'FALSE()'}`\n }\n return `{${field}}='${String(value)}'`\n })\n \n const filterFormula = conditions.length > 1 ? `AND(${conditions.join(',')})` : conditions[0]\n if (filterFormula) {\n queryParams.append('filterByFormula', filterFormula)\n }\n }\n \n let url = `https://api.airtable.com/v0/".concat(baseId, "/${encodeURIComponent('").concat(tableName, "')}`\n if (queryParams.toString()) {\n url += `?${queryParams.toString()}`\n }\n \n const allRecords = []\n let airtableOffset\n const skipValue = offsetParam !== undefined ? parseInt(offsetParam) : (page ? (parseInt(page) - 1) * parseInt(perPageValue) : 0)\n const totalRecordsNeeded = skipValue + parseInt(perPageValue)\n \n do {\n const fetchUrl = airtableOffset ? `${url}&offset=${airtableOffset}` : url\n const response = await fetch(fetchUrl, {\n method: 'GET',\n headers: {\n Authorization: `Bearer ").concat((0, utils_1.replaceSecretReference)(personalAccessToken, {
+
return "import fetch from 'node-fetch'\n\n".concat((0, utils_1.generateDateFormatterCode)(), "\n\nexport default async function handler(req, res) {\n try {\n const { query, view, limit, page, perPage, sortBy, sortOrder, filters, offset: offsetParam } = req.query\n \n const queryParams = new URLSearchParams()\n \n if (view) {\n queryParams.append('view', view)\n }\n \n if (sortBy) {\n queryParams.append('sort[0][field]', sortBy)\n queryParams.append('sort[0][direction]', sortOrder || 'asc')\n }\n \n const perPageValue = limit || perPage || 100\n queryParams.append('pageSize', Math.min(parseInt(perPageValue), 100).toString())\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n const conditions = Object.entries(parsedFilters).map(([field, value]) => {\n if (Array.isArray(value)) {\n const arrayConditions = value.map((v) => {\n if (typeof v === 'string') {\n return `{${field}}='${v.replace(/'/g, \"\\\\'\")}'`\n } else if (typeof v === 'number') {\n return `{${field}}=${v}`\n } else if (typeof v === 'boolean') {\n return `{${field}}=${v ? 'TRUE()' : 'FALSE()'}`\n }\n return `{${field}}='${String(v)}'`\n })\n return arrayConditions.length > 1\n ? `OR(${arrayConditions.join(',')})`\n : arrayConditions[0]\n } else if (typeof value === 'string') {\n return `{${field}}='${value.replace(/'/g, \"\\\\'\")}'`\n } else if (typeof value === 'number') {\n return `{${field}}=${value}`\n } else if (typeof value === 'boolean') {\n return `{${field}}=${value ? 'TRUE()' : 'FALSE()'}`\n }\n return `{${field}}='${String(value)}'`\n })\n \n const filterFormula = conditions.length > 1 ? `AND(${conditions.join(',')})` : conditions[0]\n if (filterFormula) {\n queryParams.append('filterByFormula', filterFormula)\n }\n }\n \n let url = `https://api.airtable.com/v0/").concat(baseId, "/${encodeURIComponent('").concat(tableName, "')}`\n if (queryParams.toString()) {\n url += `?${queryParams.toString()}`\n }\n \n const allRecords = []\n let airtableOffset\n const skipValue = offsetParam !== undefined ? parseInt(offsetParam) : (page ? (parseInt(page) - 1) * parseInt(perPageValue) : 0)\n const totalRecordsNeeded = skipValue + parseInt(perPageValue)\n \n do {\n const fetchUrl = airtableOffset ? `${url}&offset=${airtableOffset}` : url\n const response = await fetch(fetchUrl, {\n method: 'GET',\n headers: {\n Authorization: `Bearer ").concat((0, utils_1.replaceSecretReference)(personalAccessToken, {
 templateLiteral: true,
-
}), "`,\n 'Content-Type': 'application/json'\n }\n })\n \n if (!response.ok) {\n const errorData = await response.json().catch(() => ({}))\n return res.status(response.status).json({\n success: false,\n error: errorData.error?.message || `HTTP ${response.status}: ${response.statusText}`,\n timestamp: Date.now()\n })\n }\n \n const data = await response.json()\n allRecords.push(...data.records)\n airtableOffset = data.offset\n \n if (allRecords.length >= totalRecordsNeeded || !airtableOffset) {\n break\n }\n } while (airtableOffset)\n \n const paginatedRecords = allRecords.slice(skipValue, skipValue + parseInt(perPageValue))\n \n const formattedRecords = paginatedRecords.map((record) => ({\n id: record.id,\n ...record.fields,\n createdTime: record.createdTime\n }))\n \n const safeData = JSON.parse(JSON.stringify(formattedRecords))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Airtable fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
+
}), "`,\n 'Content-Type': 'application/json'\n }\n })\n \n if (!response.ok) {\n const errorData = await response.json().catch(() => ({}))\n return res.status(response.status).json({\n success: false,\n error: errorData.error?.message || `HTTP ${response.status}: ${response.statusText}`,\n timestamp: Date.now()\n })\n }\n \n const data = await response.json()\n allRecords.push(...data.records)\n airtableOffset = data.offset\n \n if (allRecords.length >= totalRecordsNeeded || !airtableOffset) {\n break\n }\n } while (airtableOffset)\n \n const paginatedRecords = allRecords.slice(skipValue, skipValue + parseInt(perPageValue))\n \n const formattedRecords = paginatedRecords.map((record) => ({\n id: record.id,\n ...record.fields,\n createdTime: record.createdTime\n }))\n \n const safeData = JSON.parse(JSON.stringify(formattedRecords, dateReplacer))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Airtable fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
 };
 exports.generateAirtableFetcher = generateAirtableFetcher;
 //# sourceMappingURL=airtable.js.map
package/dist/cjs/fetchers/airtable.js.map
@@ -1 +1 @@
-{"version":3,"file":"airtable.js","sourceRoot":"","sources":["../../../src/fetchers/airtable.ts"],"names":[],"mappings":";;;AAAA,
+
{"version":3,"file":"airtable.js","sourceRoot":"","sources":["../../../src/fetchers/airtable.ts"],"names":[],"mappings":";;;AAAA,kCAA4E;AAErE,IAAM,sBAAsB,GAAG,UACpC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,MAAM,IAAI,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;QACtF,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,8BAA8B,EAAE,CAAA;KACjE;IAED,IAAI,CAAC,MAAM,CAAC,mBAAmB,IAAI,OAAO,MAAM,CAAC,mBAAmB,KAAK,QAAQ,EAAE;QACjF,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,4CAA4C,EAAE,CAAA;KAC/E;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAhBY,QAAA,sBAAsB,0BAgBlC;AAQM,IAAM,uBAAuB,GAAG,UACrC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,cAAc,GAAG,MAAwB,CAAA;IAC/C,IAAM,MAAM,GAAG,cAAc,CAAC,MAAM,CAAA;IACpC,IAAM,mBAAmB,GAAG,cAAc,CAAC,mBAAmB,CAAA;IAE9D,OAAO,4CAEP,IAAA,iCAAyB,GAAE,m+DAqDiB,MAAM,oCAA2B,SAAS,wkBAepD,IAAA,8BAAsB,EAAC,mBAAmB,EAAE;QACpE,eAAe,EAAE,IAAI;KACtB,CAAC,+1CA+CX,CAAA;AACD,CAAC,CAAA;AAhIY,QAAA,uBAAuB,2BAgInC"}
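
The practical effect of the `dateReplacer` change in this and the following fetchers: the routes already round-tripped rows through `JSON.parse(JSON.stringify(...))` to strip non-serializable values, but a bare `JSON.stringify` collapses `Date` instances to ISO strings, which matters most for the sources whose drivers return real `Date` objects (mariadb, mysql, postgresql, mongodb, firestore timestamps). A small illustration, using the hypothetical `dateReplacer` sketched above:

```ts
const record = { id: 'rec1', createdTime: new Date('2024-01-15T09:30:00Z') }

// 0.42.8 behavior: Dates serialize to their ISO form.
JSON.stringify(record)
// -> {"id":"rec1","createdTime":"2024-01-15T09:30:00.000Z"}

// 0.42.9 behavior: the replacer can substitute a display-ready string.
JSON.stringify(record, dateReplacer)
// -> e.g. {"id":"rec1","createdTime":"Jan 15, 2024, 09:30 AM"}
//    (the exact format depends on the helper's actual implementation)
```
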
package/dist/cjs/fetchers/clickhouse.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"clickhouse.d.ts","sourceRoot":"","sources":["../../../src/fetchers/clickhouse.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,wBAAwB,WAC3B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAkBpC,CAAA;AAQD,eAAO,MAAM,yBAAyB,WAC5B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,
+{"version":3,"file":"clickhouse.d.ts","sourceRoot":"","sources":["../../../src/fetchers/clickhouse.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,wBAAwB,WAC3B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAkBpC,CAAA;AAQD,eAAO,MAAM,yBAAyB,WAC5B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAqGF,CAAA"}
package/dist/cjs/fetchers/clickhouse.js
@@ -23,7 +23,7 @@ var generateClickHouseFetcher = function (config, tableName) {
 var url = clickConfig.url;
 var username = clickConfig.username;
 var password = clickConfig.password;
-
return "import { createClient } from '@clickhouse/client'\n\nlet client = null\n\nconst getClient = () => {\n if (client) return client\n \n client = createClient({\n url: ".concat(JSON.stringify(url), ",\n username: ").concat(JSON.stringify(username), ",\n password: ").concat((0, utils_1.replaceSecretReference)(password), "\n })\n \n return client\n}\n\nexport default async function handler(req, res) {\n try {\n const client = getClient()\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n \n if (query) {\n if (queryColumns) {\n const columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n const searchConditions = columns.map(\n (col) => `positionCaseInsensitive(toString(${col}), '${query}') > 0`\n )\n conditions.push(`(${searchConditions.join(' OR ')})`)\n } else {\n // Note: Without queryColumns, ClickHouse can't search all columns efficiently\n // Users should provide queryColumns for optimal search performance\n console.warn('Search query provided without queryColumns - search may not work as expected')\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const formattedValues = value\n .map((v) => (typeof v === 'string' ? `'${v}'` : v))\n .join(', ')\n conditions.push(`${key} IN (${formattedValues})`)\n } else if (typeof value === 'string') {\n conditions.push(`${key} = '${value}'`)\n } else {\n conditions.push(`${key} = ${value}`)\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await client.query({ query: sql })\n const resultResponse = await result.json()\n const safeData = JSON.parse(JSON.stringify(resultResponse.data))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('ClickHouse fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
+
return "import { createClient } from '@clickhouse/client'\n\nlet client = null\n\nconst getClient = () => {\n if (client) return client\n \n client = createClient({\n url: ".concat(JSON.stringify(url), ",\n username: ").concat(JSON.stringify(username), ",\n password: ").concat((0, utils_1.replaceSecretReference)(password), "\n })\n \n return client\n}\n\n").concat((0, utils_1.generateDateFormatterCode)(), "\n\nexport default async function handler(req, res) {\n try {\n const client = getClient()\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n \n if (query) {\n if (queryColumns) {\n const columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n const searchConditions = columns.map(\n (col) => `positionCaseInsensitive(toString(${col}), '${query}') > 0`\n )\n conditions.push(`(${searchConditions.join(' OR ')})`)\n } else {\n // Note: Without queryColumns, ClickHouse can't search all columns efficiently\n // Users should provide queryColumns for optimal search performance\n console.warn('Search query provided without queryColumns - search may not work as expected')\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const formattedValues = value\n .map((v) => (typeof v === 'string' ? `'${v}'` : v))\n .join(', ')\n conditions.push(`${key} IN (${formattedValues})`)\n } else if (typeof value === 'string') {\n conditions.push(`${key} = '${value}'`)\n } else {\n conditions.push(`${key} = ${value}`)\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await client.query({ query: sql })\n const resultResponse = await result.json()\n const safeData = JSON.parse(JSON.stringify(resultResponse.data, dateReplacer))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('ClickHouse fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
 };
 exports.generateClickHouseFetcher = generateClickHouseFetcher;
 //# sourceMappingURL=clickhouse.js.map
package/dist/cjs/fetchers/clickhouse.js.map
@@ -1 +1 @@
-{"version":3,"file":"clickhouse.js","sourceRoot":"","sources":["../../../src/fetchers/clickhouse.ts"],"names":[],"mappings":";;;AAAA,
+
{"version":3,"file":"clickhouse.js","sourceRoot":"","sources":["../../../src/fetchers/clickhouse.ts"],"names":[],"mappings":";;;AAAA,kCAA4E;AAErE,IAAM,wBAAwB,GAAG,UACtC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,GAAG,IAAI,OAAO,MAAM,CAAC,GAAG,KAAK,QAAQ,EAAE;QACjD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,4BAA4B,EAAE,CAAA;KAC/D;IAED,IAAI,CAAC,MAAM,CAAC,QAAQ,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QAC3D,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,iCAAiC,EAAE,CAAA;KACpE;IAED,IAAI,CAAC,MAAM,CAAC,QAAQ,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QAC3D,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,iCAAiC,EAAE,CAAA;KACpE;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AApBY,QAAA,wBAAwB,4BAoBpC;AAQM,IAAM,yBAAyB,GAAG,UACvC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAA0B,CAAA;IAC9C,IAAM,GAAG,GAAG,WAAW,CAAC,GAAG,CAAA;IAC3B,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IACrC,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IAErC,OAAO,oLAQE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,8BACd,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,8BACxB,IAAA,8BAAsB,EAAC,QAAQ,CAAC,+CAM9C,IAAA,iCAAyB,GAAE,wgDAuCG,SAAS,ilCAuCxC,CAAA;AACD,CAAC,CAAA;AAxGY,QAAA,yBAAyB,6BAwGrC"}
package/dist/cjs/fetchers/csv-file.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"csv-file.d.ts","sourceRoot":"","sources":["../../../src/fetchers/csv-file.ts"],"names":[],"mappings":"
+{"version":3,"file":"csv-file.d.ts","sourceRoot":"","sources":["../../../src/fetchers/csv-file.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,iBAAiB,WACpB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAuBpC,CAAA;AAOD,eAAO,MAAM,sBAAsB,WAAY,OAAO,MAAM,EAAE,OAAO,CAAC,KAAG,MAkFxE,CAAA;AAGD,eAAO,MAAM,uBAAuB,YAAa,GAAG,KAAG,MA+CtD,CAAA"}
package/dist/cjs/fetchers/csv-file.js
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.generateCSVCountFetcher = exports.generateCSVFileFetcher = exports.validateCSVConfig = void 0;
+var utils_1 = require("../utils");
 var validateCSVConfig = function (config) {
 if (!config || typeof config !== 'object') {
 return { isValid: false, error: 'Config must be a valid object' };
@@ -25,7 +26,7 @@ var validateCSVConfig = function (config) {
 exports.validateCSVConfig = validateCSVConfig;
 var generateCSVFileFetcher = function (config) {
 var csvConfig = config;
-
return "const data = ".concat(JSON.stringify(csvConfig.parsedData || []), "\n\nexport default async function handler(req, res) {\n try {\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset: offsetParam } = req.query\n \n let filteredData = [...data]\n \n if (query) {\n const searchQuery = query.toLowerCase()\n \n if (queryColumns) {\n const columns = JSON.parse(queryColumns)\n filteredData = filteredData.filter((item) => {\n return columns.some((col) => {\n const value = item[col]\n return value && String(value).toLowerCase().includes(searchQuery)\n })\n })\n } else {\n filteredData = filteredData.filter((item) => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n filteredData = filteredData.filter((item) => {\n return Object.entries(parsedFilters).every(([key, value]) => {\n if (Array.isArray(value)) {\n return value.includes(item[key])\n }\n return item[key] === value\n })\n })\n }\n \n if (sortBy) {\n filteredData.sort((a, b) => {\n const aVal = a[sortBy]\n const bVal = b[sortBy]\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n if (aVal < bVal) return -sortOrderValue\n if (aVal > bVal) return sortOrderValue\n return 0\n })\n }\n \n const limitValue = limit || perPage\n const offsetValue = offsetParam !== undefined ? parseInt(offsetParam) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : 0)\n \n if (limitValue) {\n filteredData = filteredData.slice(offsetValue, offsetValue + parseInt(limitValue))\n }\n \n const safeData = JSON.parse(JSON.stringify(filteredData))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('CSV fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
+
return "const data = ".concat(JSON.stringify(csvConfig.parsedData || []), "\n\n").concat((0, utils_1.generateDateFormatterCode)(), "\n\nexport default async function handler(req, res) {\n try {\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset: offsetParam } = req.query\n \n let filteredData = [...data]\n \n if (query) {\n const searchQuery = query.toLowerCase()\n \n if (queryColumns) {\n const columns = JSON.parse(queryColumns)\n filteredData = filteredData.filter((item) => {\n return columns.some((col) => {\n const value = item[col]\n return value && String(value).toLowerCase().includes(searchQuery)\n })\n })\n } else {\n filteredData = filteredData.filter((item) => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n filteredData = filteredData.filter((item) => {\n return Object.entries(parsedFilters).every(([key, value]) => {\n if (Array.isArray(value)) {\n return value.includes(item[key])\n }\n return item[key] === value\n })\n })\n }\n \n if (sortBy) {\n filteredData.sort((a, b) => {\n const aVal = a[sortBy]\n const bVal = b[sortBy]\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n if (aVal < bVal) return -sortOrderValue\n if (aVal > bVal) return sortOrderValue\n return 0\n })\n }\n \n const limitValue = limit || perPage\n const offsetValue = offsetParam !== undefined ? parseInt(offsetParam) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : 0)\n \n if (limitValue) {\n filteredData = filteredData.slice(offsetValue, offsetValue + parseInt(limitValue))\n }\n \n const safeData = JSON.parse(JSON.stringify(filteredData, dateReplacer))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('CSV fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
 };
 exports.generateCSVFileFetcher = generateCSVFileFetcher;
 // tslint:disable-next-line:variable-name
package/dist/cjs/fetchers/csv-file.js.map
@@ -1 +1 @@
-{"version":3,"file":"csv-file.js","sourceRoot":"","sources":["../../../src/fetchers/csv-file.ts"],"names":[],"mappings":";;;
+
{"version":3,"file":"csv-file.js","sourceRoot":"","sources":["../../../src/fetchers/csv-file.ts"],"names":[],"mappings":";;;AAAA,kCAAoD;AAE7C,IAAM,iBAAiB,GAAG,UAC/B,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,UAAU,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE;QAC3D,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,8BAA8B,EAAE,CAAA;KACjE;IAED,2EAA2E;IAC3E,IAAI,MAAM,CAAC,OAAO,KAAK,SAAS,EAAE;QAChC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE;YAClC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,qCAAqC,EAAE,CAAA;SACxE;QAED,KAAqB,UAAc,EAAd,KAAA,MAAM,CAAC,OAAO,EAAd,cAAc,EAAd,IAAc,EAAE;YAAhC,IAAM,MAAM,SAAA;YACf,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,CAAC,MAAM,CAAC,EAAE,IAAI,OAAO,MAAM,CAAC,EAAE,KAAK,QAAQ,EAAE;gBACxF,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,kCAAkC,EAAE,CAAA;aACrE;SACF;KACF;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAzBY,QAAA,iBAAiB,qBAyB7B;AAOM,IAAM,sBAAsB,GAAG,UAAC,MAA+B;IACpE,IAAM,SAAS,GAAG,MAAuB,CAAA;IACzC,OAAO,uBAAgB,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,UAAU,IAAI,EAAE,CAAC,iBAEjE,IAAA,iCAAyB,GAAE,i0EA6E5B,CAAA;AACD,CAAC,CAAA;AAlFY,QAAA,sBAAsB,0BAkFlC;AAED,yCAAyC;AAClC,IAAM,uBAAuB,GAAG,UAAC,OAAY;IAClD,OAAO,knCA6CR,CAAA;AACD,CAAC,CAAA;AA/CY,QAAA,uBAAuB,2BA+CnC"}
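
A pattern that repeats verbatim in every generated handler, here and below: an explicit `offset` query parameter wins; otherwise `page`/`perPage` derive one; otherwise the offset is 0. Restated on its own (the helper name is mine, for illustration):

```ts
// Mirrors the offset expression repeated in each generated route.
const resolveOffset = (offset?: string, page?: string, perPage?: string): number =>
  offset !== undefined
    ? parseInt(offset, 10)
    : page && perPage
    ? (parseInt(page, 10) - 1) * parseInt(perPage, 10)
    : 0

resolveOffset('40')                 // 40 -- explicit offset takes precedence
resolveOffset(undefined, '3', '25') // 50 -- derived from page/perPage
```
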
package/dist/cjs/fetchers/firestore.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"firestore.d.ts","sourceRoot":"","sources":["../../../src/fetchers/firestore.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,uBAAuB,WAC1B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAyBpC,CAAA;AAOD,eAAO,MAAM,wBAAwB,WAC3B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,
+{"version":3,"file":"firestore.d.ts","sourceRoot":"","sources":["../../../src/fetchers/firestore.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,uBAAuB,WAC1B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAyBpC,CAAA;AAOD,eAAO,MAAM,wBAAwB,WAC3B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAoIF,CAAA"}
package/dist/cjs/fetchers/firestore.js
@@ -29,7 +29,7 @@ exports.validateFirestoreConfig = validateFirestoreConfig;
 var generateFirestoreFetcher = function (config, tableName) {
 var firestoreConfig = config;
 var serviceAccount = firestoreConfig.serviceAccount;
-
return "import * as admin from 'firebase-admin'\n\nlet firestore = null\n\nconst getFirestore = () => {\n if (firestore) return firestore\n \n const rawServiceAccount = ".concat((0, utils_1.replaceSecretReference)(serviceAccount), "\n let serviceAccount\n\n try {\n serviceAccount = JSON.parse(rawServiceAccount)\n } catch (error) {\n throw new Error('Invalid Firestore service account JSON: ' + error.message)\n }\n \n if (!admin.apps.length) {\n admin.initializeApp({\n credential: admin.credential.cert(serviceAccount)\n })\n }\n \n firestore = admin.firestore()\n return firestore\n}\n\nexport default async function handler(req, res) {\n try {\n const firestore = getFirestore()\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n let queryRef = firestore.collection('").concat(tableName, "')\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n queryRef = queryRef.where(key, 'in', value)\n } else {\n queryRef = queryRef.where(key, '==', value)\n }\n })\n }\n \n let usePostFiltering = false\n \n if (query) {\n if (queryColumns) {\n const columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n for (const column of columns) {\n queryRef = queryRef\n .where(column, '>=', query)\n .where(column, '<=', query + '\\uf8ff')\n }\n } else {\n // Firestore doesn't support full-text search without queryColumns\n // We'll fetch all data and filter in JavaScript\n usePostFiltering = true\n }\n }\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? 'desc' : 'asc'\n queryRef = queryRef.orderBy(sortBy, sortOrderValue)\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage && parseInt(page) > 1 ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n // Only apply pagination at query level if not post-filtering\n if (!usePostFiltering) {\n if (limitValue) {\n queryRef = queryRef.limit(parseInt(limitValue))\n }\n if (offsetValue !== undefined) {\n queryRef = queryRef.offset(offsetValue)\n }\n }\n \n const snapshot = await queryRef.get()\n let documents = []\n snapshot.forEach((doc) => {\n documents.push({\n id: doc.id,\n ...doc.data()\n })\n })\n \n // Apply post-filtering if needed\n if (usePostFiltering && query) {\n const searchQuery = query.toLowerCase()\n documents = documents.filter((item) => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n \n // Apply pagination after filtering\n if (limitValue) {\n const start = offsetValue || 0\n documents = documents.slice(start, start + parseInt(limitValue))\n } else if (offsetValue) {\n documents = documents.slice(offsetValue)\n }\n }\n \n const safeData = JSON.parse(JSON.stringify(documents))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Firestore fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
+
return "import * as admin from 'firebase-admin'\n\nlet firestore = null\n\nconst getFirestore = () => {\n if (firestore) return firestore\n \n const rawServiceAccount = ".concat((0, utils_1.replaceSecretReference)(serviceAccount), "\n let serviceAccount\n\n try {\n serviceAccount = JSON.parse(rawServiceAccount)\n } catch (error) {\n throw new Error('Invalid Firestore service account JSON: ' + error.message)\n }\n \n if (!admin.apps.length) {\n admin.initializeApp({\n credential: admin.credential.cert(serviceAccount)\n })\n }\n \n firestore = admin.firestore()\n return firestore\n}\n\n").concat((0, utils_1.generateDateFormatterCode)(), "\n\nexport default async function handler(req, res) {\n try {\n const firestore = getFirestore()\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n let queryRef = firestore.collection('").concat(tableName, "')\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n queryRef = queryRef.where(key, 'in', value)\n } else {\n queryRef = queryRef.where(key, '==', value)\n }\n })\n }\n \n let usePostFiltering = false\n \n if (query) {\n if (queryColumns) {\n const columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n for (const column of columns) {\n queryRef = queryRef\n .where(column, '>=', query)\n .where(column, '<=', query + '\\uf8ff')\n }\n } else {\n // Firestore doesn't support full-text search without queryColumns\n // We'll fetch all data and filter in JavaScript\n usePostFiltering = true\n }\n }\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? 'desc' : 'asc'\n queryRef = queryRef.orderBy(sortBy, sortOrderValue)\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage && parseInt(page) > 1 ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n // Only apply pagination at query level if not post-filtering\n if (!usePostFiltering) {\n if (limitValue) {\n queryRef = queryRef.limit(parseInt(limitValue))\n }\n if (offsetValue !== undefined) {\n queryRef = queryRef.offset(offsetValue)\n }\n }\n \n const snapshot = await queryRef.get()\n let documents = []\n snapshot.forEach((doc) => {\n documents.push({\n id: doc.id,\n ...doc.data()\n })\n })\n \n // Apply post-filtering if needed\n if (usePostFiltering && query) {\n const searchQuery = query.toLowerCase()\n documents = documents.filter((item) => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n \n // Apply pagination after filtering\n if (limitValue) {\n const start = offsetValue || 0\n documents = documents.slice(start, start + parseInt(limitValue))\n } else if (offsetValue) {\n documents = documents.slice(offsetValue)\n }\n }\n \n const safeData = JSON.parse(JSON.stringify(documents, dateReplacer))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Firestore fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
 };
 exports.generateFirestoreFetcher = generateFirestoreFetcher;
 //# sourceMappingURL=firestore.js.map
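
The firestore route's search path (unchanged here apart from the date formatter) uses the standard range-query trick for prefix matching: `'\uf8ff'` is a very high code point, so the pair `>= query` / `<= query + '\uf8ff'` spans every string that starts with `query`. In isolation, with illustrative names:

```ts
import * as admin from 'firebase-admin'

// Matches documents whose `field` value starts with `prefix`.
const prefixSearch = (
  col: admin.firestore.CollectionReference,
  field: string,
  prefix: string
) => col.where(field, '>=', prefix).where(field, '<=', prefix + '\uf8ff')
```
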
package/dist/cjs/fetchers/firestore.js.map
@@ -1 +1 @@
-{"version":3,"file":"firestore.js","sourceRoot":"","sources":["../../../src/fetchers/firestore.ts"],"names":[],"mappings":";;;AAAA,
+
{"version":3,"file":"firestore.js","sourceRoot":"","sources":["../../../src/fetchers/firestore.ts"],"names":[],"mappings":";;;AAAA,kCAA4E;AAErE,IAAM,uBAAuB,GAAG,UACrC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,cAAc,IAAI,OAAO,MAAM,CAAC,cAAc,KAAK,QAAQ,EAAE;QACvE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,4CAA4C,EAAE,CAAA;KAC/E;IAED,IAAM,cAAc,GAAG,MAAM,CAAC,cAAwB,CAAA;IAEtD,iGAAiG;IACjG,sEAAsE;IACtE,IAAI,CAAC,cAAc,CAAC,UAAU,CAAC,qBAAqB,CAAC,EAAE;QACrD,IAAI;YACF,IAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,CAAA;YACzC,IAAI,CAAC,MAAM,CAAC,UAAU,IAAI,CAAC,MAAM,CAAC,WAAW,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;gBACrE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,kDAAkD,EAAE,CAAA;aACrF;SACF;QAAC,WAAM;YACN,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,oCAAoC,EAAE,CAAA;SACvE;KACF;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AA3BY,QAAA,uBAAuB,2BA2BnC;AAOM,IAAM,wBAAwB,GAAG,UACtC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,eAAe,GAAG,MAAyB,CAAA;IACjD,IAAM,cAAc,GAAG,eAAe,CAAC,cAAc,CAAA;IAErD,OAAO,8KAOqB,IAAA,8BAAsB,EAAC,cAAc,CAAC,+YAmBlE,IAAA,iCAAyB,GAAE,2QAOc,SAAS,k3FA8FnD,CAAA;AACD,CAAC,CAAA;AAvIY,QAAA,wBAAwB,4BAuIpC"}
package/dist/cjs/fetchers/google-sheets.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"google-sheets.d.ts","sourceRoot":"","sources":["../../../src/fetchers/google-sheets.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,0BAA0B,WAC7B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAwBpC,CAAA;AAWD,eAAO,MAAM,2BAA2B,WAAY,OAAO,MAAM,EAAE,OAAO,CAAC,KAAG,
+{"version":3,"file":"google-sheets.d.ts","sourceRoot":"","sources":["../../../src/fetchers/google-sheets.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,0BAA0B,WAC7B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAwBpC,CAAA;AAWD,eAAO,MAAM,2BAA2B,WAAY,OAAO,MAAM,EAAE,OAAO,CAAC,KAAG,MA2P7E,CAAA"}
package/dist/cjs/fetchers/google-sheets.js
@@ -24,7 +24,7 @@ var validateGoogleSheetsConfig = function (config) {
 exports.validateGoogleSheetsConfig = validateGoogleSheetsConfig;
 var generateGoogleSheetsFetcher = function (config) {
 var sheetsConfig = config;
-
return "import fetch from 'node-fetch'\n\nexport default async function handler(req, res) {\n try {\n const sheetUrl = ".concat(JSON.stringify(sheetsConfig.sheetUrl), "\n let sheetId = ").concat(JSON.stringify(sheetsConfig.sheetId), "\n const range = ").concat(JSON.stringify(sheetsConfig.range || 'A1:Z1000'), "\n const maxRows = ").concat(sheetsConfig.maxRows || 0, "\n \n if (!sheetId && sheetUrl) {\n const match = sheetUrl.match(/\\/d\\/([a-zA-Z0-9-_]+)/)\n sheetId = match ? match[1] : undefined\n }\n \n if (!sheetId) {\n return res.status(400).json({\n success: false,\n error: 'Invalid Google Sheets URL or Sheet ID',\n timestamp: Date.now()\n })\n }\n \n let url = `https://docs.google.com/spreadsheets/d/${sheetId}/gviz/tq?tqx=out:json&range=${range}`\n \n if (maxRows && maxRows > 0) {\n url += `&tq=limit ${maxRows}`\n }\n \n const response = await fetch(url)\n \n if (!response.ok) {\n return res.status(response.status).json({\n success: false,\n error: `HTTP ${response.status}: ${response.statusText}`,\n timestamp: Date.now()\n })\n }\n \n const text = await response.text()\n const jsonMatch = text.match(/google\\.visualization\\.Query\\.setResponse\\((.*)\\);/)\n \n if (!jsonMatch) {\n return res.status(500).json({\n success: false,\n error: 'Unable to parse Google Sheets response',\n timestamp: Date.now()\n })\n }\n \n const data = JSON.parse(jsonMatch[1])\n \n if (data.status === 'error') {\n return res.status(500).json({\n success: false,\n error: data.errors?.[0]?.detailed_message || 'Failed to fetch Google Sheets data',\n timestamp: Date.now()\n })\n }\n \n const table = data.table\n const columns = table.cols.map((col, index) => ({\n
+return "import fetch from 'node-fetch'\n\nexport default async function handler(req, res) {\n try {\n const sheetUrl = ".concat(JSON.stringify(sheetsConfig.sheetUrl), "\n let sheetId = ").concat(JSON.stringify(sheetsConfig.sheetId), "\n const range = ").concat(JSON.stringify(sheetsConfig.range || 'A1:Z1000'), "\n const maxRows = ").concat(sheetsConfig.maxRows || 0, "\n \n if (!sheetId && sheetUrl) {\n const match = sheetUrl.match(/\\/d\\/([a-zA-Z0-9-_]+)/)\n sheetId = match ? match[1] : undefined\n }\n \n if (!sheetId) {\n return res.status(400).json({\n success: false,\n error: 'Invalid Google Sheets URL or Sheet ID',\n timestamp: Date.now()\n })\n }\n \n let gid = undefined\n if (sheetUrl) {\n const gidMatch = sheetUrl.match(/[#&]gid=([0-9]+)/)\n gid = gidMatch ? gidMatch[1] : undefined\n }\n \n let url = `https://docs.google.com/spreadsheets/d/${sheetId}/gviz/tq?tqx=out:json&range=${range}`\n \n if (gid) {\n url += `&gid=${gid}`\n }\n \n if (maxRows && maxRows > 0) {\n url += `&tq=limit ${maxRows}`\n }\n \n const response = await fetch(url)\n \n if (!response.ok) {\n return res.status(response.status).json({\n success: false,\n error: `HTTP ${response.status}: ${response.statusText}`,\n timestamp: Date.now()\n })\n }\n \n const text = await response.text()\n const jsonMatch = text.match(/google\\.visualization\\.Query\\.setResponse\\((.*)\\);/)\n \n if (!jsonMatch) {\n return res.status(500).json({\n success: false,\n error: 'Unable to parse Google Sheets response',\n timestamp: Date.now()\n })\n }\n \n const data = JSON.parse(jsonMatch[1])\n \n if (data.status === 'error') {\n return res.status(500).json({\n success: false,\n error: data.errors?.[0]?.detailed_message || 'Failed to fetch Google Sheets data',\n timestamp: Date.now()\n })\n }\n \n const formatDateValue = (date) => {\n const options = {\n year: 'numeric',\n month: 'short',\n day: 'numeric',\n }\n \n const timeOptions = {\n hour: '2-digit',\n minute: '2-digit',\n }\n \n const hasTime = date.getHours() !== 0 || date.getMinutes() !== 0 || date.getSeconds() !== 0\n \n if (hasTime) {\n return date.toLocaleString('en-US', { ...options, ...timeOptions })\n }\n \n return date.toLocaleDateString('en-US', options)\n }\n \n const parseGoogleSheetsValue = (value) => {\n if (typeof value === 'string') {\n const dateMatch = value.match(/^Date\\((\\d+),(\\d+),(\\d+)(?:,(\\d+),(\\d+),(\\d+))?\\)$/)\n if (dateMatch) {\n const year = parseInt(dateMatch[1], 10)\n const month = parseInt(dateMatch[2], 10)\n const day = parseInt(dateMatch[3], 10)\n const hour = dateMatch[4] ? parseInt(dateMatch[4], 10) : 0\n const minute = dateMatch[5] ? parseInt(dateMatch[5], 10) : 0\n const second = dateMatch[6] ? parseInt(dateMatch[6], 10) : 0\n const date = new Date(year, month, day, hour, minute, second)\n return formatDateValue(date)\n }\n }\n return value\n }\n \n const table = data.table\n const rawRows = table.rows || []\n \n if (rawRows.length === 0) {\n return res.status(200).json({\n success: true,\n data: [],\n timestamp: Date.now()\n })\n }\n \n const firstRow = rawRows[0]\n const firstRowValues = firstRow.c.map((cell) => cell?.v ?? cell?.f ?? null)\n \n const hasHeaderRow = firstRowValues.every((val) => \n val !== null && val !== undefined && val !== '' && typeof val === 'string'\n )\n \n let columns\n let dataRows\n \n if (hasHeaderRow && rawRows.length > 1) {\n columns = firstRowValues.map((headerValue, index) => ({\n id: `col_${index}`,\n label: String(headerValue),\n type: 'string'\n }))\n dataRows = rawRows.slice(1)\n } else {\n columns = table.cols.map((col, index) => ({\n id: col.id || `col_${index}`,\n label: col.label || `Column ${index + 1}`,\n type: col.type || 'string'\n }))\n dataRows = rawRows\n }\n \n if (maxRows && dataRows.length > maxRows) {\n dataRows = dataRows.slice(0, maxRows)\n }\n \n const rows = dataRows.map((row) => {\n const rowData = {}\n row.c.forEach((cell, index) => {\n const columnId = columns[index]?.id || `col_${index}`\n const rawValue = cell?.v ?? cell?.f ?? null\n rowData[columnId] = parseGoogleSheetsValue(rawValue)\n })\n return rowData\n })\n \n const columnsWithData = columns.filter((col, index) => {\n const hasHeaderData = col.label && col.label !== `Column ${index + 1}`\n const hasDataInColumn = rows.some((row) => {\n const value = row[col.id]\n return value !== null && value !== undefined && value !== ''\n })\n return hasHeaderData || hasDataInColumn\n })\n \n const filteredRows = rows.map((row) => {\n const filteredRow = {}\n columnsWithData.forEach((col) => {\n filteredRow[col.id] = row[col.id]\n })\n return filteredRow\n })\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset: offsetParam } = req.query\n \n let filteredData = [...filteredRows]\n \n if (query) {\n const searchQuery = query.toLowerCase()\n \n if (queryColumns) {\n const searchColumns = JSON.parse(queryColumns)\n filteredData = filteredData.filter((item) => {\n return searchColumns.some((col) => {\n const value = item[col]\n return value && String(value).toLowerCase().includes(searchQuery)\n })\n })\n } else {\n filteredData = filteredData.filter((item) => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n filteredData = filteredData.filter((item) => {\n return Object.entries(parsedFilters).every(([key, value]) => {\n if (Array.isArray(value)) {\n return value.includes(item[key])\n }\n return item[key] === value\n })\n })\n }\n \n if (sortBy) {\n filteredData.sort((a, b) => {\n const aVal = a[sortBy]\n const bVal = b[sortBy]\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n if (aVal < bVal) return -sortOrderValue\n if (aVal > bVal) return sortOrderValue\n return 0\n })\n }\n \n const limitValue = limit || perPage\n const offsetValue = offsetParam !== undefined ? parseInt(offsetParam) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : 0)\n \n if (limitValue) {\n filteredData = filteredData.slice(offsetValue, offsetValue + parseInt(limitValue))\n }\n \n const safeData = JSON.parse(JSON.stringify(filteredData))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Google Sheets fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
 };
 exports.generateGoogleSheetsFetcher = generateGoogleSheetsFetcher;
 //# sourceMappingURL=google-sheets.js.map
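
Three behavioral changes hide in that large google-sheets rewrite: the fetcher now honors a `gid=` fragment in the sheet URL to target a specific tab, promotes an all-string first row to column labels, and decodes the gviz wire format for date cells, which arrive as strings like `Date(2024,0,15)` with a zero-based month. The hunk's regex, exercised standalone:

```ts
// Same pattern the generated parseGoogleSheetsValue uses; month is 0-based.
const cell = 'Date(2024,0,15)'
const m = cell.match(/^Date\((\d+),(\d+),(\d+)(?:,(\d+),(\d+),(\d+))?\)$/)
if (m) {
  const d = new Date(+m[1], +m[2], +m[3], +(m[4] ?? 0), +(m[5] ?? 0), +(m[6] ?? 0))
  console.log(d.toLocaleDateString('en-US', { year: 'numeric', month: 'short', day: 'numeric' }))
  // -> Jan 15, 2024
}
```
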
package/dist/cjs/fetchers/google-sheets.js.map
@@ -1 +1 @@
-
{"version":3,"file":"google-sheets.js","sourceRoot":"","sources":["../../../src/fetchers/google-sheets.ts"],"names":[],"mappings":";;;AAAO,IAAM,0BAA0B,GAAG,UACxC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACvC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,qCAAqC,EAAE,CAAA;KACxE;IAED,IAAI,MAAM,CAAC,OAAO,IAAI,OAAO,MAAM,CAAC,OAAO,KAAK,QAAQ,EAAE;QACxD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,2BAA2B,EAAE,CAAA;KAC9D;IAED,IAAI,MAAM,CAAC,QAAQ,EAAE;QACnB,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACvC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,4BAA4B,EAAE,CAAA;SAC/D;QAED,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,QAAQ,CAAC,8BAA8B,CAAC,EAAE;YAC7D,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,kCAAkC,EAAE,CAAA;SACrE;KACF;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AA1BY,QAAA,0BAA0B,8BA0BtC;AAWM,IAAM,2BAA2B,GAAG,UAAC,MAA+B;IACzE,IAAM,YAAY,GAAG,MAA4B,CAAA;IACjD,OAAO,6HAIc,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,QAAQ,CAAC,iCACxC,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,OAAO,CAAC,iCACpC,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,KAAK,IAAI,UAAU,CAAC,mCAC9C,YAAY,CAAC,OAAO,IAAI,CAAC,
+
{"version":3,"file":"google-sheets.js","sourceRoot":"","sources":["../../../src/fetchers/google-sheets.ts"],"names":[],"mappings":";;;AAAO,IAAM,0BAA0B,GAAG,UACxC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACvC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,qCAAqC,EAAE,CAAA;KACxE;IAED,IAAI,MAAM,CAAC,OAAO,IAAI,OAAO,MAAM,CAAC,OAAO,KAAK,QAAQ,EAAE;QACxD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,2BAA2B,EAAE,CAAA;KAC9D;IAED,IAAI,MAAM,CAAC,QAAQ,EAAE;QACnB,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACvC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,4BAA4B,EAAE,CAAA;SAC/D;QAED,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,QAAQ,CAAC,8BAA8B,CAAC,EAAE;YAC7D,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,kCAAkC,EAAE,CAAA;SACrE;KACF;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AA1BY,QAAA,0BAA0B,8BA0BtC;AAWM,IAAM,2BAA2B,GAAG,UAAC,MAA+B;IACzE,IAAM,YAAY,GAAG,MAA4B,CAAA;IACjD,OAAO,6HAIc,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,QAAQ,CAAC,iCACxC,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,OAAO,CAAC,iCACpC,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,KAAK,IAAI,UAAU,CAAC,mCAC9C,YAAY,CAAC,OAAO,IAAI,CAAC,qrOAiP9C,CAAA;AACD,CAAC,CAAA;AA3PY,QAAA,2BAA2B,+BA2PvC"}
package/dist/cjs/fetchers/javascript.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"javascript.d.ts","sourceRoot":"","sources":["../../../src/fetchers/javascript.ts"],"names":[],"mappings":"
+{"version":3,"file":"javascript.d.ts","sourceRoot":"","sources":["../../../src/fetchers/javascript.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,wBAAwB,WAC3B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CA2BpC,CAAA;AAMD,eAAO,MAAM,yBAAyB,WAAY,OAAO,MAAM,EAAE,OAAO,CAAC,KAAG,MAqG3E,CAAA;AAGD,eAAO,MAAM,8BAA8B,YAAa,GAAG,KAAG,MA+C7D,CAAA"}
package/dist/cjs/fetchers/javascript.js
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.generateJavaScriptCountFetcher = exports.generateJavaScriptFetcher = exports.validateJavaScriptConfig = void 0;
+var utils_1 = require("../utils");
 var validateJavaScriptConfig = function (config) {
 if (!config || typeof config !== 'object') {
 return { isValid: false, error: 'Config must be a valid object' };
@@ -29,7 +30,7 @@ var validateJavaScriptConfig = function (config) {
 exports.validateJavaScriptConfig = validateJavaScriptConfig;
 var generateJavaScriptFetcher = function (config) {
 var jsConfig = config;
-return "
+
return "".concat((0, utils_1.generateDateFormatterCode)(), "\n\nexport default async function handler(req, res) {\n try {\n const { limit, offset, page, perPage, query, queryColumns, sortBy, sortOrder, filters } = req.query\n \n const code = ").concat(JSON.stringify(jsConfig.code), "\n const executeCode = new Function('return ' + code)\n let data = executeCode()\n \n if (Array.isArray(data)) {\n // 1. Apply search filter\n if (query && query.trim()) {\n const searchQuery = query.toLowerCase()\n \n if (queryColumns) {\n try {\n const columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n data = data.filter(item => {\n return columns.some(col => {\n const value = item[col]\n if (value === null || value === undefined) return false\n return String(value).toLowerCase().includes(searchQuery)\n })\n })\n } catch (err) {\n console.error('Error parsing queryColumns:', err)\n }\n } else {\n data = data.filter(item => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n }\n }\n \n // 2. Apply custom filters\n if (filters) {\n try {\n const parsedFilters = typeof filters === 'string' ? JSON.parse(filters) : filters\n data = data.filter((item) => {\n return Object.entries(parsedFilters).every(([key, value]) => {\n if (Array.isArray(value)) {\n return value.includes(item[key])\n }\n return item[key] === value\n })\n })\n } catch (err) {\n console.error('Error parsing filters:', err)\n }\n }\n \n // 3. Apply sorting\n if (sortBy && sortBy.trim()) {\n data.sort((a, b) => {\n const aVal = a[sortBy]\n const bVal = b[sortBy]\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n if (aVal < bVal) return -sortOrderValue\n if (aVal > bVal) return sortOrderValue\n return 0\n })\n }\n \n // 4. Apply pagination\n const limitValue = limit || perPage\n const pageValue = page ? Math.max(1, parseInt(page)) : undefined\n const offsetValue = offset !== undefined ? Math.max(0, parseInt(offset)) : (pageValue && perPage ? (pageValue - 1) * Math.max(1, parseInt(perPage)) : 0)\n \n if (limitValue) {\n const limitInt = Math.max(1, parseInt(limitValue))\n data = data.slice(offsetValue, offsetValue + limitInt)\n } else if (offsetValue > 0) {\n data = data.slice(offsetValue)\n }\n }\n \n const safeData = JSON.parse(JSON.stringify(data, dateReplacer))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('JavaScript execution error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to execute code',\n timestamp: Date.now()\n })\n }\n}\n");
 };
 exports.generateJavaScriptFetcher = generateJavaScriptFetcher;
 // tslint:disable-next-line:variable-name
package/dist/cjs/fetchers/javascript.js.map
@@ -1 +1 @@
-{"version":3,"file":"javascript.js","sourceRoot":"","sources":["../../../src/fetchers/javascript.ts"],"names":[],"mappings":";;;
+
{"version":3,"file":"javascript.js","sourceRoot":"","sources":["../../../src/fetchers/javascript.ts"],"names":[],"mappings":";;;AAAA,kCAAoD;AAE7C,IAAM,wBAAwB,GAAG,UACtC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,IAAI,IAAI,OAAO,MAAM,CAAC,IAAI,KAAK,QAAQ,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;QAChF,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,6BAA6B,EAAE,CAAA;KAChE;IAED,IAAM,iBAAiB,GAAG;QACxB,eAAe;QACf,YAAY;QACZ,YAAY;QACZ,gBAAgB;QAChB,YAAY;QACZ,WAAW;QACX,cAAc;KACf,CAAA;IAED,KAAsB,UAAiB,EAAjB,uCAAiB,EAAjB,+BAAiB,EAAjB,IAAiB,EAAE;QAApC,IAAM,OAAO,0BAAA;QAChB,IAAI,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;YAC7B,OAAO,CAAC,IAAI,CAAC,gFAAgF,CAAC,CAAA;YAC9F,MAAK;SACN;KACF;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AA7BY,QAAA,wBAAwB,4BA6BpC;AAMM,IAAM,yBAAyB,GAAG,UAAC,MAA+B;IACvE,IAAM,QAAQ,GAAG,MAA0B,CAAA;IAC3C,OAAO,UAAG,IAAA,iCAAyB,GAAE,6MAMpB,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,ikGA4F/C,CAAA;AACD,CAAC,CAAA;AArGY,QAAA,yBAAyB,6BAqGrC;AAED,yCAAyC;AAClC,IAAM,8BAA8B,GAAG,UAAC,OAAY;IACzD,OAAO,gmCA6CR,CAAA;AACD,CAAC,CAAA;AA/CY,QAAA,8BAA8B,kCA+C1C"}
package/dist/cjs/fetchers/mariadb.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"mariadb.d.ts","sourceRoot":"","sources":["../../../src/fetchers/mariadb.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,sBAAsB,WACzB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,
+
{"version":3,"file":"mariadb.d.ts","sourceRoot":"","sources":["../../../src/fetchers/mariadb.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,sBAAsB,WACzB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MA2IF,CAAA;AAED,eAAO,MAAM,2BAA2B,WAC9B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MA8GF,CAAA"}
package/dist/cjs/fetchers/mariadb.js
@@ -5,13 +5,13 @@ var utils_1 = require("../utils");
var generateMariaDBFetcher = function (config, tableName) {
var mariaConfig = config;
var database = mariaConfig.database;
-
return "import mariadb from 'mariadb'\n\nexport default async function handler(req, res) {\n let connection = null\n try {\n connection = await mariadb.createConnection({\n host: ".concat(JSON.stringify(mariaConfig.host), ",\n port: ").concat(mariaConfig.port || 3306, ",\n user: ").concat(JSON.stringify(mariaConfig.user), ",\n password: ").concat((0, utils_1.replaceSecretReference)(mariaConfig.password), ",\n database: ").concat(JSON.stringify(mariaConfig.database), ",\n ssl: ").concat(mariaConfig.ssl || false).concat(mariaConfig.sslConfig
+
return "import mariadb from 'mariadb'\n\n".concat((0, utils_1.generateDateFormatterCode)(), "\n\nexport default async function handler(req, res) {\n let connection = null\n try {\n connection = await mariadb.createConnection({\n host: ").concat(JSON.stringify(mariaConfig.host), ",\n port: ").concat(mariaConfig.port || 3306, ",\n user: ").concat(JSON.stringify(mariaConfig.user), ",\n password: ").concat((0, utils_1.replaceSecretReference)(mariaConfig.password), ",\n database: ").concat(JSON.stringify(mariaConfig.database), ",\n ssl: ").concat(mariaConfig.ssl || false).concat(mariaConfig.sslConfig
? ",\n sslConfig: {\n ".concat(mariaConfig.sslConfig.ca ? "ca: ".concat((0, utils_1.replaceSecretReference)(mariaConfig.sslConfig.ca), ",") : '', "\n ").concat(mariaConfig.sslConfig.cert
? "cert: ".concat((0, utils_1.replaceSecretReference)(mariaConfig.sslConfig.cert), ",")
: '', "\n ").concat(mariaConfig.sslConfig.key
? "key: ".concat((0, utils_1.replaceSecretReference)(mariaConfig.sslConfig.key), ",")
: '', "\n rejectUnauthorized: ").concat(mariaConfig.sslConfig.rejectUnauthorized !== false, "\n }")
-
: '', "\n })\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaRows = await connection.query(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [").concat(JSON.stringify(database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => `CAST(\\`${col}\\` AS CHAR) LIKE ?`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => '?').join(', ')\n queryParams.push(...value)\n conditions.push(`\\`${key}\\` IN (${placeholders})`)\n } else {\n conditions.push(`\\`${key}\\` = ?`)\n queryParams.push(value)\n }\n })\n }\n \n let sql = `SELECT * FROM \\`").concat(tableName, "\\``\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY \\`${sortBy}\\` ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const rows = await connection.query(sql, queryParams)\n const rowArray = Array.isArray(rows) ? rows : []\n const plainRows = rowArray.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('MariaDB fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (connection) {\n try {\n await connection.end()\n } catch (error) {\n console.error('Error closing MariaDB connection:', error)\n }\n }\n }\n}\n");
+
: '', "\n })\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaRows = await connection.query(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [").concat(JSON.stringify(database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => `CAST(\\`${col}\\` AS CHAR) LIKE ?`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => '?').join(', ')\n queryParams.push(...value)\n conditions.push(`\\`${key}\\` IN (${placeholders})`)\n } else {\n conditions.push(`\\`${key}\\` = ?`)\n queryParams.push(value)\n }\n })\n }\n \n let sql = `SELECT * FROM \\`").concat(tableName, "\\``\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY \\`${sortBy}\\` ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const rows = await connection.query(sql, queryParams)\n const rowArray = Array.isArray(rows) ? rows : []\n const plainRows = rowArray.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows, dateReplacer))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('MariaDB fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (connection) {\n try {\n await connection.end()\n } catch (error) {\n console.error('Error closing MariaDB connection:', error)\n }\n }\n }\n}\n");
};
exports.generateMariaDBFetcher = generateMariaDBFetcher;
var generateMariaDBCountFetcher = function (config, tableName) {
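Aside from the date handling, the MariaDB hunk shows the handler's general shape: column names are interpolated inside backticks while every user-supplied value travels through a ? placeholder. A condensed, self-contained sketch of that accumulation pattern (table and column names here are hypothetical):

const conditions = []
const queryParams = []

// One LIKE test per searchable column, OR-ed into a single group.
for (const col of ['name', 'email']) {
  conditions.push('CAST(`' + col + '` AS CHAR) LIKE ?')
  queryParams.push('%smith%')
}

let sql = 'SELECT * FROM `users`'
if (conditions.length > 0) {
  sql += ' WHERE (' + conditions.join(' OR ') + ')'
}
console.log(sql)
// SELECT * FROM `users` WHERE (CAST(`name` AS CHAR) LIKE ? OR CAST(`email` AS CHAR) LIKE ?)
console.log(queryParams) // [ '%smith%', '%smith%' ]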
package/dist/cjs/fetchers/mariadb.js.map
@@ -1 +1 @@
-
{"version":3,"file":"mariadb.js","sourceRoot":"","sources":["../../../src/fetchers/mariadb.ts"],"names":[],"mappings":";;;AAAA,
+
{"version":3,"file":"mariadb.js","sourceRoot":"","sources":["../../../src/fetchers/mariadb.ts"],"names":[],"mappings":";;;AAAA,kCAA4E;AAarE,IAAM,sBAAsB,GAAG,UACpC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAuB,CAAA;IAC3C,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IAErC,OAAO,2CAEP,IAAA,iCAAyB,GAAE,qKAMf,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,4BAChC,WAAW,CAAC,IAAI,IAAI,IAAI,4BACxB,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,gCAC5B,IAAA,8BAAsB,EAAC,WAAW,CAAC,QAAQ,CAAC,gCAC5C,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,QAAQ,CAAC,2BACzC,WAAW,CAAC,GAAG,IAAI,KAAK,SACjC,WAAW,CAAC,SAAS;QACnB,CAAC,CAAC,yCAGE,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,uBAG1F,WAAW,CAAC,SAAS,CAAC,IAAI;YACxB,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,MAAG;YAChE,CAAC,CAAC,EAAE,uBAGN,WAAW,CAAC,SAAS,CAAC,GAAG;YACvB,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,MAAG;YAC9D,CAAC,CAAC,EAAE,2CAEc,WAAW,CAAC,SAAS,CAAC,kBAAkB,KAAK,KAAK,cACxE;QACF,CAAC,CAAC,EAAE,yqBAsBK,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,kkCA8BjC,SAAS,k5CAkD5C,CAAA;AACD,CAAC,CAAA;AA9IY,QAAA,sBAAsB,0BA8IlC;AAEM,IAAM,2BAA2B,GAAG,UACzC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAuB,CAAA;IAC3C,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IAErC,OAAO,oJAMK,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,4BAChC,WAAW,CAAC,IAAI,IAAI,IAAI,4BACxB,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,gCAC5B,IAAA,8BAAsB,EAAC,WAAW,CAAC,QAAQ,CAAC,gCAC5C,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,QAAQ,CAAC,2BACzC,WAAW,CAAC,GAAG,IAAI,KAAK,SACjC,WAAW,CAAC,SAAS;QACnB,CAAC,CAAC,yCAGE,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,uBAG1F,WAAW,CAAC,SAAS,CAAC,IAAI;YACxB,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,MAAG;YAChE,CAAC,CAAC,EAAE,uBAGN,WAAW,CAAC,SAAS,CAAC,GAAG;YACvB,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,MAAG;YAC9D,CAAC,CAAC,EAAE,2CAEc,WAAW,CAAC,SAAS,CAAC,kBAAkB,KAAK,KAAK,cACxE;QACF,CAAC,CAAC,EAAE,itBAqBK,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,+1BAwBhB,SAAS,ouBA8B7D,CAAA;AACD,CAAC,CAAA;AAjHY,QAAA,2BAA2B,+BAiHvC"}
package/dist/cjs/fetchers/mongodb.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"mongodb.d.ts","sourceRoot":"","sources":["../../../src/fetchers/mongodb.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,qBAAqB,WACxB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAoCpC,CAAA;AAWD,eAAO,MAAM,sBAAsB,WACzB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,
+
{"version":3,"file":"mongodb.d.ts","sourceRoot":"","sources":["../../../src/fetchers/mongodb.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,qBAAqB,WACxB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAoCpC,CAAA;AAWD,eAAO,MAAM,sBAAsB,WACzB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MA8HF,CAAA;AAGD,eAAO,MAAM,2BAA2B,YAAa,GAAG,aAAa,MAAM,KAAG,MA6D7E,CAAA"}
package/dist/cjs/fetchers/mongodb.js
@@ -41,7 +41,7 @@ var generateMongoDBFetcher = function (config, tableName) {
if (!connectionString) {
connectionString = "mongodb://".concat(hasUsername ? "".concat(mongoConfig.username, ":").concat(mongoConfig.password, "@") : '').concat(mongoConfig.host, ":").concat(mongoConfig.port || 27017, "/").concat(database);
}
-
return "import { MongoClient, ObjectId } from 'mongodb'\n\nexport default async function handler(req, res) {\n let client = null\n try {\n const url = ".concat((0, utils_1.replaceSecretReference)(connectionString), "\n client = new MongoClient(url, {\n connectTimeoutMS: 30000,\n serverSelectionTimeoutMS: 30000\n })\n \n await client.connect()\n const db = client.db(").concat(JSON.stringify(database), ")\n const collection = db.collection('").concat(tableName, "')\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const filter = {}\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all field names from a sample document\n try {\n const sampleDoc = await db.collection(").concat(JSON.stringify(tableName), ").findOne({})\n if (sampleDoc) {\n columns = Object.keys(sampleDoc).filter(key => key !== '_id')\n }\n } catch (schemaError) {\n console.warn('Failed to fetch sample document for column names:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const orConditions = columns.map((col) => ({\n [col]: { $regex: query, $options: 'i' }\n }))\n filter.$or = orConditions\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (key === '_id') {\n if (Array.isArray(value)) {\n filter[key] = {\n $in: value.map((id) => (typeof id === 'string' ? new ObjectId(id) : id))\n }\n } else if (typeof value === 'string') {\n filter[key] = new ObjectId(value)\n } else {\n filter[key] = value\n }\n } else if (Array.isArray(value)) {\n filter[key] = { $in: value }\n } else {\n filter[key] = value\n }\n })\n }\n \n let cursor = collection.find(filter)\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n cursor = cursor.sort({ [sortBy]: sortOrderValue })\n }\n \n const limitValue = limit || perPage\n const skipValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (skipValue !== undefined) {\n cursor = cursor.skip(skipValue)\n }\n \n if (limitValue) {\n cursor = cursor.limit(parseInt(limitValue))\n }\n \n const documents = await cursor.toArray()\n const safeData = JSON.parse(JSON.stringify(documents))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('MongoDB fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n try {\n await client.close()\n } catch (error) {\n console.error('Error closing MongoDB client:', error)\n }\n }\n }\n}\n");
+
return "import { MongoClient, ObjectId } from 'mongodb'\n\n".concat((0, utils_1.generateDateFormatterCode)(), "\n\nexport default async function handler(req, res) {\n let client = null\n try {\n const url = ").concat((0, utils_1.replaceSecretReference)(connectionString), "\n client = new MongoClient(url, {\n connectTimeoutMS: 30000,\n serverSelectionTimeoutMS: 30000\n })\n \n await client.connect()\n const db = client.db(").concat(JSON.stringify(database), ")\n const collection = db.collection('").concat(tableName, "')\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const filter = {}\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all field names from a sample document\n try {\n const sampleDoc = await db.collection(").concat(JSON.stringify(tableName), ").findOne({})\n if (sampleDoc) {\n columns = Object.keys(sampleDoc).filter(key => key !== '_id')\n }\n } catch (schemaError) {\n console.warn('Failed to fetch sample document for column names:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const orConditions = columns.map((col) => ({\n [col]: { $regex: query, $options: 'i' }\n }))\n filter.$or = orConditions\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (key === '_id') {\n if (Array.isArray(value)) {\n filter[key] = {\n $in: value.map((id) => (typeof id === 'string' ? new ObjectId(id) : id))\n }\n } else if (typeof value === 'string') {\n filter[key] = new ObjectId(value)\n } else {\n filter[key] = value\n }\n } else if (Array.isArray(value)) {\n filter[key] = { $in: value }\n } else {\n filter[key] = value\n }\n })\n }\n \n let cursor = collection.find(filter)\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n cursor = cursor.sort({ [sortBy]: sortOrderValue })\n }\n \n const limitValue = limit || perPage\n const skipValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (skipValue !== undefined) {\n cursor = cursor.skip(skipValue)\n }\n \n if (limitValue) {\n cursor = cursor.limit(parseInt(limitValue))\n }\n \n const documents = await cursor.toArray()\n const safeData = JSON.parse(JSON.stringify(documents, dateReplacer))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('MongoDB fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n try {\n await client.close()\n } catch (error) {\n console.error('Error closing MongoDB client:', error)\n }\n }\n }\n}\n");
};
exports.generateMongoDBFetcher = generateMongoDBFetcher;
// tslint:disable-next-line:variable-name
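The MongoDB handler builds a filter object rather than SQL: search becomes a case-insensitive $regex condition per column, combined under $or. A minimal sketch of the resulting filter shape, with hypothetical field names:

const query = 'smith'
const columns = ['name', 'email']

const filter = {
  // case-insensitive substring match on each searchable field
  $or: columns.map((col) => ({ [col]: { $regex: query, $options: 'i' } })),
}

console.log(JSON.stringify(filter))
// {"$or":[{"name":{"$regex":"smith","$options":"i"}},{"email":{"$regex":"smith","$options":"i"}}]}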
package/dist/cjs/fetchers/mongodb.js.map
@@ -1 +1 @@
-
{"version":3,"file":"mongodb.js","sourceRoot":"","sources":["../../../src/fetchers/mongodb.ts"],"names":[],"mappings":";;;AAAA,
+
{"version":3,"file":"mongodb.js","sourceRoot":"","sources":["../../../src/fetchers/mongodb.ts"],"names":[],"mappings":";;;AAAA,kCAA4E;AAErE,IAAM,qBAAqB,GAAG,UACnC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,+CAA+C;IAC/C,IAAI,MAAM,CAAC,gBAAgB,EAAE;QAC3B,IAAI,OAAO,MAAM,CAAC,gBAAgB,KAAK,QAAQ,IAAI,MAAM,CAAC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;YACxF,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,8CAA8C,EAAE,CAAA;SACjF;QAED,uFAAuF;QACvF,IAAM,OAAO,GAAG,MAAM,CAAC,gBAA0B,CAAA;QACjD,IACE,CAAC,OAAO,CAAC,UAAU,CAAC,qBAAqB,CAAC;YAC1C,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,CAAC;YACjC,CAAC,OAAO,CAAC,UAAU,CAAC,gBAAgB,CAAC,EACrC;YACA,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,0CAA0C,EAAE,CAAA;SAC7E;QAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;KACzB;IAED,2EAA2E;IAC3E,yEAAyE;IACzE,uCAAuC;IACvC,IAAI,MAAM,CAAC,IAAI,KAAK,SAAS,IAAI,OAAO,MAAM,CAAC,IAAI,KAAK,QAAQ,EAAE;QAChE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,uBAAuB,EAAE,CAAA;KAC1D;IAED,IAAI,CAAC,MAAM,CAAC,QAAQ,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QAC3D,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,2BAA2B,EAAE,CAAA;KAC9D;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAtCY,QAAA,qBAAqB,yBAsCjC;AAWM,IAAM,sBAAsB,GAAG,UACpC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAuB,CAAA;IAC3C,IAAM,WAAW,GAAG,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,QAAQ,CAAA;IACzC,IAAM,QAAQ,GAAG,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,QAAQ,CAAA;IAEtC,qDAAqD;IACrD,IAAI,gBAAgB,GAAG,WAAW,CAAC,gBAAgB,CAAA;IACnD,IAAI,CAAC,gBAAgB,EAAE;QACrB,gBAAgB,GAAG,oBACjB,WAAW,CAAC,CAAC,CAAC,UAAG,WAAW,CAAC,QAAQ,cAAI,WAAW,CAAC,QAAQ,MAAG,CAAC,CAAC,CAAC,EAAE,SACpE,WAAW,CAAC,IAAI,cAAI,WAAW,CAAC,IAAI,IAAI,KAAK,cAAI,QAAQ,CAAE,CAAA;KAC/D;IAED,OAAO,6DAEP,IAAA,iCAAyB,GAAE,kHAKX,IAAA,8BAAsB,EAAC,gBAAgB,CAAC,8LAO/B,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,sDACX,SAAS,8cAeC,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,q5EAkF1E,CAAA;AACD,CAAC,CAAA;AAjIY,QAAA,sBAAsB,0BAiIlC;AAED,yCAAyC;AAClC,IAAM,2BAA2B,GAAG,UAAC,OAAY,EAAE,SAAiB;IACzE,OAAO,kNAO+B,SAAS,i+CAoDhD,CAAA;AACD,CAAC,CAAA;AA7DY,QAAA,2BAA2B,+BA6DvC"}
package/dist/cjs/fetchers/mysql.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"mysql.d.ts","sourceRoot":"","sources":["../../../src/fetchers/mysql.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,oBAAoB,WACvB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,
+
{"version":3,"file":"mysql.d.ts","sourceRoot":"","sources":["../../../src/fetchers/mysql.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,oBAAoB,WACvB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAoJF,CAAA;AAED,eAAO,MAAM,yBAAyB,WAC5B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAgFF,CAAA"}
package/dist/cjs/fetchers/mysql.js
@@ -19,7 +19,7 @@ var generateMySQLFetcher = function (config, tableName) {
: defaultSSLEnabled
? "{ rejectUnauthorized: true }"
: 'false';
-
return "import mysql from 'mysql2/promise'\n\nconst getConnection = () => {\n return mysql.createConnection({\n host: ".concat(JSON.stringify(mysqlConfig.host), ",\n port: ").concat(mysqlConfig.port || 3306, ",\n user: ").concat(resolvedUser !== null ? JSON.stringify(resolvedUser) : 'undefined', ",\n password: ").concat((0, utils_1.replaceSecretReference)(mysqlConfig.password), ",\n database: ").concat(JSON.stringify(mysqlConfig.database), ",\n ssl: ").concat(sslConfigString, "\n })\n}\n\nexport default async function handler(req, res) {\n const connection = await getConnection()\n \n try {\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const [schemaRows] = await connection.query(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [").concat(JSON.stringify(database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => `CAST(${mysql.escapeId(col)} AS CHAR) LIKE ?`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => '?').join(', ')\n queryParams.push(...value)\n conditions.push(`${mysql.escapeId(key)} IN (${placeholders})`)\n } else {\n conditions.push(`${mysql.escapeId(key)} = ?`)\n queryParams.push(value)\n }\n })\n }\n \n let sql = `SELECT * FROM ${mysql.escapeId('").concat(tableName, "')}`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${mysql.escapeId(sortBy)} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const [rows] = await connection.query(sql, queryParams)\n const rowArray = Array.isArray(rows) ? rows : []\n const plainRows = rowArray.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('MySQL fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (connection) {\n try {\n await connection.end()\n } catch (error) {\n console.error('Error closing MySQL connection:', error)\n }\n }\n }\n}\n");
+
return "import mysql from 'mysql2/promise'\n\nconst getConnection = () => {\n return mysql.createConnection({\n host: ".concat(JSON.stringify(mysqlConfig.host), ",\n port: ").concat(mysqlConfig.port || 3306, ",\n user: ").concat(resolvedUser !== null ? JSON.stringify(resolvedUser) : 'undefined', ",\n password: ").concat((0, utils_1.replaceSecretReference)(mysqlConfig.password), ",\n database: ").concat(JSON.stringify(mysqlConfig.database), ",\n ssl: ").concat(sslConfigString, "\n })\n}\n\n").concat((0, utils_1.generateDateFormatterCode)(), "\n\nexport default async function handler(req, res) {\n const connection = await getConnection()\n \n try {\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const [schemaRows] = await connection.query(\n `SELECT COLUMN_NAME FROM information_schema.COLUMNS \n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? \n ORDER BY ORDINAL_POSITION`,\n [").concat(JSON.stringify(database), ", ").concat(JSON.stringify(tableName), "]\n )\n columns = schemaRows.map(row => row.COLUMN_NAME)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => `CAST(${mysql.escapeId(col)} AS CHAR) LIKE ?`)\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => '?').join(', ')\n queryParams.push(...value)\n conditions.push(`${mysql.escapeId(key)} IN (${placeholders})`)\n } else {\n conditions.push(`${mysql.escapeId(key)} = ?`)\n queryParams.push(value)\n }\n })\n }\n \n let sql = `SELECT * FROM ${mysql.escapeId('").concat(tableName, "')}`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${mysql.escapeId(sortBy)} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const [rows] = await connection.query(sql, queryParams)\n const rowArray = Array.isArray(rows) ? rows : []\n const plainRows = rowArray.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows, dateReplacer))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('MySQL fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (connection) {\n try {\n await connection.end()\n } catch (error) {\n console.error('Error closing MySQL connection:', error)\n }\n }\n }\n}\n");
};
exports.generateMySQLFetcher = generateMySQLFetcher;
var generateMySQLCountFetcher = function (config, tableName) {
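The MySQL variant differs from MariaDB in routing identifiers through mysql.escapeId() instead of hand-written backticks. Placeholders only protect values, so ORDER BY input taken from the query string needs separate identifier escaping. A small sketch using the same import the generated file uses:

import mysql from 'mysql2/promise'

const sortBy = 'created_at; DROP TABLE users' // hostile query-string input
console.log('SELECT * FROM `users` ORDER BY ' + mysql.escapeId(sortBy))
// The whole string comes back as one backtick-quoted identifier, so it can
// only ever name a (nonexistent) column, never inject extra SQL.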
package/dist/cjs/fetchers/mysql.js.map
@@ -1 +1 @@
-
{"version":3,"file":"mysql.js","sourceRoot":"","sources":["../../../src/fetchers/mysql.ts"],"names":[],"mappings":";;;AAAA,
+
{"version":3,"file":"mysql.js","sourceRoot":"","sources":["../../../src/fetchers/mysql.ts"],"names":[],"mappings":";;;AAAA,kCAA4E;AAarE,IAAM,oBAAoB,GAAG,UAClC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAqB,CAAA;IACzC,IAAM,YAAY,GAAG,WAAW,CAAC,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,IAAI,CAAA;IACrE,IAAM,kBAAkB,GAAG,CAAC,CAAC,WAAW,CAAC,SAAS,CAAA;IAClD,IAAM,iBAAiB,GAAG,WAAW,CAAC,GAAG,KAAK,KAAK,CAAA;IACnD,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IAErC,IAAM,eAAe,GAAG,kBAAkB;QACxC,CAAC,CAAC,mBACE,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBAE1F,WAAW,CAAC,SAAS,CAAC,IAAI;YACxB,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,MAAG;YAChE,CAAC,CAAC,EAAE,qBAGN,WAAW,CAAC,SAAS,CAAC,GAAG;YACvB,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,MAAG;YAC9D,CAAC,CAAC,EAAE,yCAGN,WAAW,CAAC,SAAS,CAAC,kBAAkB,KAAK,SAAS;YACpD,CAAC,CAAC,WAAW,CAAC,SAAS,CAAC,kBAAkB;YAC1C,CAAC,CAAC,IAAI,YAEV;QACF,CAAC,CAAC,iBAAiB;YACnB,CAAC,CAAC,8BAA8B;YAChC,CAAC,CAAC,OAAO,CAAA;IAEX,OAAO,4HAIG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,0BAChC,WAAW,CAAC,IAAI,IAAI,IAAI,0BACxB,YAAY,KAAK,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,WAAW,8BAC9D,IAAA,8BAAsB,EAAC,WAAW,CAAC,QAAQ,CAAC,8BAC5C,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,QAAQ,CAAC,yBACzC,eAAe,0BAIxB,IAAA,iCAAyB,GAAE,2wBAwBd,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,+mCA8BlB,SAAS,s5CAkD3D,CAAA;AACD,CAAC,CAAA;AAvJY,QAAA,oBAAoB,wBAuJhC;AAEM,IAAM,yBAAyB,GAAG,UACvC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAqB,CAAA;IAEzC,OAAO,gyBAsBM,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,QAAQ,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,+1BAwB5B,SAAS,ouBA8B7D,CAAA;AACD,CAAC,CAAA;AAnFY,QAAA,yBAAyB,6BAmFrC"}
package/dist/cjs/fetchers/postgresql.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"postgresql.d.ts","sourceRoot":"","sources":["../../../src/fetchers/postgresql.ts"],"names":[],"mappings":"AAcA,eAAO,MAAM,yBAAyB,WAC5B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,
+
{"version":3,"file":"postgresql.d.ts","sourceRoot":"","sources":["../../../src/fetchers/postgresql.ts"],"names":[],"mappings":"AAcA,eAAO,MAAM,yBAAyB,WAC5B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAoJF,CAAA;AAED,eAAO,MAAM,8BAA8B,WACjC,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MA2FF,CAAA"}
package/dist/cjs/fetchers/postgresql.js
@@ -10,7 +10,7 @@ var generatePostgreSQLFetcher = function (config, tableName) {
? 'false'
: pgConfig.sslConfig
? "{\n ".concat(pgConfig.sslConfig.ca ? "ca: ".concat((0, utils_1.replaceSecretReference)(pgConfig.sslConfig.ca), ",") : '', "\n ").concat(pgConfig.sslConfig.cert ? "cert: ".concat((0, utils_1.replaceSecretReference)(pgConfig.sslConfig.cert), ",") : '', "\n ").concat(pgConfig.sslConfig.key ? "key: ".concat((0, utils_1.replaceSecretReference)(pgConfig.sslConfig.key), ",") : '', "\n rejectUnauthorized: false\n }")
-
: '{ rejectUnauthorized: false }', "\n })\n}\n\nexport default async function handler(req, res) {\n const client = getClient()\n \n try {\n await client.connect()\n ").concat(schema ? "await client.query('SET search_path TO ".concat(schema, "')") : '', "\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n let paramIndex = 1\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaQuery = `\n SELECT column_name \n FROM information_schema.columns \n WHERE table_name = $1\n ").concat(schema ? "AND table_schema = $2" : '', "\n ORDER BY ordinal_position\n `\n const schemaParams = schema \n ? [").concat(JSON.stringify(tableName), ", ").concat(JSON.stringify(schema), "]\n : [").concat(JSON.stringify(tableName), "]\n \n const schemaResult = await client.query(schemaQuery, schemaParams)\n columns = schemaResult.rows.map(row => row.column_name)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => {\n const condition = `${col}::text ILIKE $${paramIndex}`\n paramIndex++\n return condition\n })\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => `$${paramIndex++}`)\n queryParams.push(...value)\n conditions.push(`${key} IN (${placeholders.join(', ')})`)\n } else {\n conditions.push(`${key} = $${paramIndex}`)\n queryParams.push(value)\n paramIndex++\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await client.query(sql, queryParams)\n const rows = Array.isArray(result?.rows) ? result.rows : []\n const plainRows = rows.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('PostgreSQL fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n try {\n await client.end()\n } catch (error) {\n console.error('Error closing PostgreSQL client:', error)\n }\n }\n }\n}\n");
+
: '{ rejectUnauthorized: false }', "\n })\n}\n\n").concat((0, utils_1.generateDateFormatterCode)(), "\n\nexport default async function handler(req, res) {\n const client = getClient()\n \n try {\n await client.connect()\n ").concat(schema ? "await client.query('SET search_path TO ".concat(schema, "')") : '', "\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n let paramIndex = 1\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n // Use specified columns\n columns = JSON.parse(queryColumns)\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaQuery = `\n SELECT column_name \n FROM information_schema.columns \n WHERE table_name = $1\n ").concat(schema ? "AND table_schema = $2" : '', "\n ORDER BY ordinal_position\n `\n const schemaParams = schema \n ? [").concat(JSON.stringify(tableName), ", ").concat(JSON.stringify(schema), "]\n : [").concat(JSON.stringify(tableName), "]\n \n const schemaResult = await client.query(schemaQuery, schemaParams)\n columns = schemaResult.rows.map(row => row.column_name)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n // Continue without search if we can't get columns\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => {\n const condition = `${col}::text ILIKE $${paramIndex}`\n paramIndex++\n return condition\n })\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => `$${paramIndex++}`)\n queryParams.push(...value)\n conditions.push(`${key} IN (${placeholders.join(', ')})`)\n } else {\n conditions.push(`${key} = $${paramIndex}`)\n queryParams.push(value)\n paramIndex++\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await client.query(sql, queryParams)\n const rows = Array.isArray(result?.rows) ? result.rows : []\n const plainRows = rows.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows, dateReplacer))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('PostgreSQL fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n try {\n await client.end()\n } catch (error) {\n console.error('Error closing PostgreSQL client:', error)\n }\n }\n }\n}\n");
};
exports.generatePostgreSQLFetcher = generatePostgreSQLFetcher;
var generatePostgreSQLCountFetcher = function (config, tableName) {
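PostgreSQL uses numbered placeholders ($1, $2, ...) rather than ?, which is why the generated handler threads a running paramIndex through both the search and filter branches. The bookkeeping in isolation, with hypothetical filters:

const conditions = []
const queryParams = []
let paramIndex = 1

// One numbered placeholder per filter value, in insertion order.
for (const [key, value] of Object.entries({ status: 'active', role: 'admin' })) {
  conditions.push(key + ' = $' + paramIndex)
  queryParams.push(value)
  paramIndex++
}

console.log('SELECT * FROM accounts WHERE ' + conditions.join(' AND '))
// SELECT * FROM accounts WHERE status = $1 AND role = $2
console.log(queryParams) // [ 'active', 'admin' ]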
package/dist/cjs/fetchers/postgresql.js.map
@@ -1 +1 @@
-
{"version":3,"file":"postgresql.js","sourceRoot":"","sources":["../../../src/fetchers/postgresql.ts"],"names":[],"mappings":";;;AAAA,
+
{"version":3,"file":"postgresql.js","sourceRoot":"","sources":["../../../src/fetchers/postgresql.ts"],"names":[],"mappings":";;;AAAA,kCAA4E;AAcrE,IAAM,yBAAyB,GAAG,UACvC,MAA+B,EAC/B,SAAiB;;IAEjB,IAAM,QAAQ,GAAG,MAA0B,CAAA;IAC3C,IAAM,MAAM,GAAG,MAAA,QAAQ,CAAC,OAAO,0CAAE,MAAM,CAAA;IAEvC,OAAO,qGAIG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,0BAC7B,QAAQ,CAAC,IAAI,IAAI,IAAI,0BACrB,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,IAAI,QAAQ,CAAC,QAAQ,CAAC,8BAC9C,IAAA,8BAAsB,EAAC,QAAQ,CAAC,QAAQ,CAAC,8BACzC,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,yBAE3C,QAAQ,CAAC,GAAG,KAAK,KAAK;QACpB,CAAC,CAAC,OAAO;QACT,CAAC,CAAC,QAAQ,CAAC,SAAS;YACpB,CAAC,CAAC,mBACF,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBACpF,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBAC1F,QAAQ,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,QAAQ,CAAC,SAAS,CAAC,GAAG,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,6CAEzF;YACE,CAAC,CAAC,+BAA+B,0BAKvC,IAAA,iCAAyB,GAAE,+IAOvB,MAAM,CAAC,CAAC,CAAC,iDAA0C,MAAM,OAAI,CAAC,CAAC,CAAC,EAAE,gnBAqB1D,MAAM,CAAC,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,EAAE,0HAIlC,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,+BACpD,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,yxCAoCV,SAAS,o4CAkDxC,CAAA;AACD,CAAC,CAAA;AAvJY,QAAA,yBAAyB,6BAuJrC;AAEM,IAAM,8BAA8B,GAAG,UAC5C,MAA+B,EAC/B,SAAiB;;IAEjB,IAAM,QAAQ,GAAG,MAA0B,CAAA;IAC3C,IAAM,SAAS,GAAG,CAAC,CAAC,CAAA,MAAA,QAAQ,CAAC,OAAO,0CAAE,MAAM,CAAA,CAAA;IAE5C,OAAO,yvBAwBK,SAAS,CAAC,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,EAAE,kGAI1C,SAAS;QACP,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAQ,CAAC,MAAM,CAAC,MAAG;QAC/E,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,MAAG,g8BA0BJ,SAAS,8uBA8BpD,CAAA;AACD,CAAC,CAAA;AA9FY,QAAA,8BAA8B,kCA8F1C"}
package/dist/cjs/fetchers/redis.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"redis.d.ts","sourceRoot":"","sources":["../../../src/fetchers/redis.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,mBAAmB,WACtB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CA8BpC,CAAA;AAYD,eAAO,MAAM,oBAAoB,WAAY,OAAO,MAAM,EAAE,OAAO,CAAC,KAAG,
+
{"version":3,"file":"redis.d.ts","sourceRoot":"","sources":["../../../src/fetchers/redis.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,mBAAmB,WACtB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CA8BpC,CAAA;AAYD,eAAO,MAAM,oBAAoB,WAAY,OAAO,MAAM,EAAE,OAAO,CAAC,KAAG,MA2GtE,CAAA"}
package/dist/cjs/fetchers/redis.js
@@ -40,7 +40,7 @@ var generateRedisFetcher = function (config) {
if (!connectionString) {
connectionString = "redis://".concat(hasUsername ? "".concat(username, ":").concat(password, "@") : '').concat(host, ":").concat(port || 6379);
}
-
return "import { createClient } from 'redis'\n\nexport default async function handler(req, res) {\n let client = null\n try {\n client = createClient({\n url: ".concat((0, utils_1.replaceSecretReference)(connectionString)).concat(database ? ",\n database: ".concat(database) : '', "\n })\n \n await client.connect()\n \n const { query, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const pattern = (filters && JSON.parse(filters).pattern) || query || '*'\n const keys = await client.keys(pattern)\n \n const limitValue = limit || perPage || 100\n const skipValue = offset !== undefined ? parseInt(offset) : ((parseInt(page) || 1) - 1) * parseInt(limitValue)\n const paginatedKeys = keys.slice(skipValue, skipValue + parseInt(limitValue))\n \n const results = []\n for (const key of paginatedKeys) {\n const type = await client.type(key)\n const ttl = await client.ttl(key)\n let value\n \n switch (type) {\n case 'string':\n value = await client.get(key)\n break\n case 'list':\n value = await client.lRange(key, 0, -1)\n break\n case 'set':\n value = await client.sMembers(key)\n break\n case 'zset':\n value = await client.zRange(key, 0, -1)\n break\n case 'hash':\n value = await client.hGetAll(key)\n break\n default:\n value = null\n }\n \n results.push({\n key,\n type,\n value,\n ttl: ttl === -1 ? null : ttl\n })\n }\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n results.sort((a, b) => {\n const aVal = a[sortBy]\n const bVal = b[sortBy]\n if (aVal < bVal) return -sortOrderValue\n if (aVal > bVal) return sortOrderValue\n return 0\n })\n }\n \n const safeData = JSON.parse(JSON.stringify(results))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Redis fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n await client.quit()\n }\n }\n}\n");
+
return "import { createClient } from 'redis'\n\n".concat((0, utils_1.generateDateFormatterCode)(), "\n\nexport default async function handler(req, res) {\n let client = null\n try {\n client = createClient({\n url: ").concat((0, utils_1.replaceSecretReference)(connectionString)).concat(database ? ",\n database: ".concat(database) : '', "\n })\n \n await client.connect()\n \n const { query, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const pattern = (filters && JSON.parse(filters).pattern) || query || '*'\n const keys = await client.keys(pattern)\n \n const limitValue = limit || perPage || 100\n const skipValue = offset !== undefined ? parseInt(offset) : ((parseInt(page) || 1) - 1) * parseInt(limitValue)\n const paginatedKeys = keys.slice(skipValue, skipValue + parseInt(limitValue))\n \n const results = []\n for (const key of paginatedKeys) {\n const type = await client.type(key)\n const ttl = await client.ttl(key)\n let value\n \n switch (type) {\n case 'string':\n value = await client.get(key)\n break\n case 'list':\n value = await client.lRange(key, 0, -1)\n break\n case 'set':\n value = await client.sMembers(key)\n break\n case 'zset':\n value = await client.zRange(key, 0, -1)\n break\n case 'hash':\n value = await client.hGetAll(key)\n break\n default:\n value = null\n }\n \n results.push({\n key,\n type,\n value,\n ttl: ttl === -1 ? null : ttl\n })\n }\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n results.sort((a, b) => {\n const aVal = a[sortBy]\n const bVal = b[sortBy]\n if (aVal < bVal) return -sortOrderValue\n if (aVal > bVal) return sortOrderValue\n return 0\n })\n }\n \n const safeData = JSON.parse(JSON.stringify(results, dateReplacer))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Redis fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n await client.quit()\n }\n }\n}\n");
};
exports.generateRedisFetcher = generateRedisFetcher;
//# sourceMappingURL=redis.js.map
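Redis has no server-side LIMIT/OFFSET, so the handler pages in application code: it converts page/perPage into a slice offset (unless an explicit offset is given) and slices the key list before reading values. The arithmetic, with hypothetical numbers:

const perPage = 25
const page = 3
const offset = (page - 1) * perPage // 50, same formula as the generated handler
const keys = Array.from({ length: 120 }, (_, i) => 'key:' + i)
const pageOfKeys = keys.slice(offset, offset + perPage)
console.log(pageOfKeys[0], pageOfKeys.length) // key:50 25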
package/dist/cjs/fetchers/redis.js.map
@@ -1 +1 @@
-
{"version":3,"file":"redis.js","sourceRoot":"","sources":["../../../src/fetchers/redis.ts"],"names":[],"mappings":";;;AAAA,
+
{"version":3,"file":"redis.js","sourceRoot":"","sources":["../../../src/fetchers/redis.ts"],"names":[],"mappings":";;;AAAA,kCAA4E;AAErE,IAAM,mBAAmB,GAAG,UACjC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,+CAA+C;IAC/C,IAAI,MAAM,CAAC,gBAAgB,EAAE;QAC3B,IAAI,OAAO,MAAM,CAAC,gBAAgB,KAAK,QAAQ,IAAI,MAAM,CAAC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;YACxF,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,8CAA8C,EAAE,CAAA;SACjF;QAED,uFAAuF;QACvF,IAAM,OAAO,GAAG,MAAM,CAAC,gBAA0B,CAAA;QACjD,IACE,CAAC,OAAO,CAAC,UAAU,CAAC,qBAAqB,CAAC;YAC1C,CAAC,OAAO,CAAC,UAAU,CAAC,UAAU,CAAC;YAC/B,CAAC,OAAO,CAAC,UAAU,CAAC,WAAW,CAAC,EAChC;YACA,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,wCAAwC,EAAE,CAAA;SAC3E;QAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;KACzB;IAED,kEAAkE;IAClE,IAAI,CAAC,MAAM,CAAC,IAAI,IAAI,OAAO,MAAM,CAAC,IAAI,KAAK,QAAQ,EAAE;QACnD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,8DAA8D,EAAE,CAAA;KACjG;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAhCY,QAAA,mBAAmB,uBAgC/B;AAYM,IAAM,oBAAoB,GAAG,UAAC,MAA+B;IAClE,IAAM,WAAW,GAAG,MAAqB,CAAA;IACzC,IAAM,IAAI,GAAG,WAAW,CAAC,IAAI,CAAA;IAC7B,IAAM,IAAI,GAAG,WAAW,CAAC,IAAI,CAAA;IAC7B,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IACrC,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IACrC,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IACrC,IAAM,WAAW,GAAG,QAAQ,CAAA;IAE5B,qDAAqD;IACrD,IAAI,gBAAgB,GAAG,WAAW,CAAC,gBAAgB,CAAA;IACnD,IAAI,CAAC,gBAAgB,EAAE;QACrB,gBAAgB,GAAG,kBAAW,WAAW,CAAC,CAAC,CAAC,UAAG,QAAQ,cAAI,QAAQ,MAAG,CAAC,CAAC,CAAC,EAAE,SAAG,IAAI,cAChF,IAAI,IAAI,IAAI,CACZ,CAAA;KACH;IAED,OAAO,kDAEP,IAAA,iCAAyB,GAAE,0IAMhB,IAAA,8BAAsB,EAAC,gBAAgB,CAAC,SACjD,QAAQ,CAAC,CAAC,CAAC,6BAAsB,QAAQ,CAAE,CAAC,CAAC,CAAC,EAAE,qnEAgFnD,CAAA;AACD,CAAC,CAAA;AA3GY,QAAA,oBAAoB,wBA2GhC"}
package/dist/cjs/fetchers/redshift.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"redshift.d.ts","sourceRoot":"","sources":["../../../src/fetchers/redshift.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,uBAAuB,WAC1B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,
+
{"version":3,"file":"redshift.d.ts","sourceRoot":"","sources":["../../../src/fetchers/redshift.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,uBAAuB,WAC1B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAsJF,CAAA"}
package/dist/cjs/fetchers/redshift.js
@@ -18,9 +18,9 @@ var generateRedshiftFetcher = function (config, tableName) {
: sslConfig
? "{\n ".concat(sslConfig.ca ? "ca: ".concat((0, utils_1.replaceSecretReference)(sslConfig.ca), ",") : '', "\n ").concat(sslConfig.cert ? "cert: ".concat((0, utils_1.replaceSecretReference)(sslConfig.cert), ",") : '', "\n ").concat(sslConfig.key ? "key: ".concat((0, utils_1.replaceSecretReference)(sslConfig.key), ",") : '', "\n rejectUnauthorized: ").concat(sslConfig.rejectUnauthorized !== false, "\n }")
: '{ rejectUnauthorized: false }' // Default to SSL with no cert verification for Redshift
-
, "\n })\n}\n\nexport default async function handler(req, res) {\n const client = getClient()\n \n try {\n await client.connect()\n ").concat(schema ? "await client.query('SET search_path TO ".concat(schema, "')") : '', "\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n let paramIndex = 1\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaQuery = 'SELECT column_name FROM information_schema.columns WHERE table_name = $1' + \n ").concat(schema ? "' AND table_schema = $2'" : "''", " + \n ' ORDER BY ordinal_position'\n const schemaParams = ").concat(schema
+
, "\n })\n}\n\n").concat((0, utils_1.generateDateFormatterCode)(), "\n\nexport default async function handler(req, res) {\n const client = getClient()\n \n try {\n await client.connect()\n ").concat(schema ? "await client.query('SET search_path TO ".concat(schema, "')") : '', "\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n let paramIndex = 1\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaQuery = 'SELECT column_name FROM information_schema.columns WHERE table_name = $1' + \n ").concat(schema ? "' AND table_schema = $2'" : "''", " + \n ' ORDER BY ordinal_position'\n const schemaParams = ").concat(schema
? "[".concat(JSON.stringify(tableName), ", ").concat(JSON.stringify(schema), "]")
-
: "[".concat(JSON.stringify(tableName), "]"), "\n const schemaResult = await client.query(schemaQuery, schemaParams)\n columns = schemaResult.rows.map(row => row.column_name)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => {\n const condition = `${col}::text ILIKE $${paramIndex}`\n paramIndex++\n return condition\n })\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => `$${paramIndex++}`)\n queryParams.push(...value)\n conditions.push(`${key} IN (${placeholders.join(', ')})`)\n } else {\n conditions.push(`${key} = $${paramIndex}`)\n queryParams.push(value)\n paramIndex++\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await client.query(sql, queryParams)\n const rows = Array.isArray(result?.rows) ? result.rows : []\n const plainRows = rows.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Redshift fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n try {\n await client.end()\n } catch (error) {\n console.error('Error closing Redshift client:', error)\n }\n }\n }\n}\n");
+
: "[".concat(JSON.stringify(tableName), "]"), "\n const schemaResult = await client.query(schemaQuery, schemaParams)\n columns = schemaResult.rows.map(row => row.column_name)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => {\n const condition = `${col}::text ILIKE $${paramIndex}`\n paramIndex++\n return condition\n })\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => `$${paramIndex++}`)\n queryParams.push(...value)\n conditions.push(`${key} IN (${placeholders.join(', ')})`)\n } else {\n conditions.push(`${key} = $${paramIndex}`)\n queryParams.push(value)\n paramIndex++\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await client.query(sql, queryParams)\n const rows = Array.isArray(result?.rows) ? result.rows : []\n const plainRows = rows.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows, dateReplacer))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Redshift fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n try {\n await client.end()\n } catch (error) {\n console.error('Error closing Redshift client:', error)\n }\n }\n }\n}\n");
};
exports.generateRedshiftFetcher = generateRedshiftFetcher;
//# sourceMappingURL=redshift.js.map
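The Redshift handler assembles its information_schema lookup from string fragments so the table_schema clause (and its $2 parameter) only appears when a schema is configured. Reduced to its essentials, with a hypothetical schema and table:

const schema = 'analytics' // may be undefined when no schema is configured
const schemaQuery =
  'SELECT column_name FROM information_schema.columns WHERE table_name = $1' +
  (schema ? ' AND table_schema = $2' : '') +
  ' ORDER BY ordinal_position'
const schemaParams = schema ? ['events', schema] : ['events']
console.log(schemaQuery)
console.log(schemaParams) // [ 'events', 'analytics' ]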
package/dist/cjs/fetchers/redshift.js.map
@@ -1 +1 @@
-
{"version":3,"file":"redshift.js","sourceRoot":"","sources":["../../../src/fetchers/redshift.ts"],"names":[],"mappings":";;;AAAA,
+
{"version":3,"file":"redshift.js","sourceRoot":"","sources":["../../../src/fetchers/redshift.ts"],"names":[],"mappings":";;;AAAA,kCAA4E;AAarE,IAAM,uBAAuB,GAAG,UACrC,MAA+B,EAC/B,SAAiB;;IAEjB,IAAM,cAAc,GAAG,MAAwB,CAAA;IAC/C,IAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAA;IAChC,IAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAA;IAChC,IAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAA;IAChC,IAAM,QAAQ,GAAG,cAAc,CAAC,QAAQ,CAAA;IACxC,IAAM,QAAQ,GAAG,cAAc,CAAC,QAAQ,CAAA;IACxC,IAAM,GAAG,GAAG,cAAc,CAAC,GAAG,CAAA;IAC9B,IAAM,SAAS,GAAG,cAAc,CAAC,SAAS,CAAA;IAC1C,IAAM,MAAM,GAAG,MAAA,cAAc,CAAC,OAAO,0CAAE,MAAM,CAAA;IAE7C,OAAO,qGAIG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,0BACpB,IAAI,IAAI,IAAI,0BACZ,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,8BAChB,IAAA,8BAAsB,EAAC,QAAQ,CAAC,8BAChC,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,yBAElC,GAAG,KAAK,KAAK;QACX,CAAC,CAAC,+BAA+B;QACjC,CAAC,CAAC,SAAS;YACX,CAAC,CAAC,mBACF,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBAClE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,SAAS,CAAC,IAAI,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBACxE,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,SAAS,CAAC,GAAG,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,yCACjD,SAAS,CAAC,kBAAkB,KAAK,KAAK,YAC5D;YACE,CAAC,CAAC,+BAA+B,CAAC,wDAAwD;8BAKhG,IAAA,iCAAyB,GAAE,+IAOvB,MAAM,CAAC,CAAC,CAAC,iDAA0C,MAAM,OAAI,CAAC,CAAC,CAAC,EAAE,2oBAiB1D,MAAM,CAAC,CAAC,CAAC,0BAA0B,CAAC,CAAC,CAAC,IAAI,2FAG5C,MAAM;QACJ,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAG;QAC7D,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,MAAG,8sCAmChB,SAAS,g4CAkDxC,CAAA;AACD,CAAC,CAAA;AAzJY,QAAA,uBAAuB,2BAyJnC"}
package/dist/cjs/fetchers/rest-api.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"rest-api.d.ts","sourceRoot":"","sources":["../../../src/fetchers/rest-api.ts"],"names":[],"mappings":"
+
{"version":3,"file":"rest-api.d.ts","sourceRoot":"","sources":["../../../src/fetchers/rest-api.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,qBAAqB,WACxB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CA0BpC,CAAA;AA2CD,eAAO,MAAM,sBAAsB,WAAY,OAAO,MAAM,EAAE,OAAO,CAAC,KAAG,MA4IxE,CAAA"}