@teleporthq/teleport-plugin-next-data-source 0.42.0 → 0.42.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/COUNT_API_FIX.md +185 -0
  2. package/SEARCH_FIX_SUMMARY.md +96 -0
  3. package/dist/cjs/count-fetchers.d.ts.map +1 -1
  4. package/dist/cjs/count-fetchers.js +1 -1
  5. package/dist/cjs/count-fetchers.js.map +1 -1
  6. package/dist/cjs/fetchers/clickhouse.d.ts.map +1 -1
  7. package/dist/cjs/fetchers/clickhouse.js +1 -1
  8. package/dist/cjs/fetchers/clickhouse.js.map +1 -1
  9. package/dist/cjs/fetchers/firestore.d.ts.map +1 -1
  10. package/dist/cjs/fetchers/firestore.js +1 -1
  11. package/dist/cjs/fetchers/firestore.js.map +1 -1
  12. package/dist/cjs/fetchers/javascript.d.ts.map +1 -1
  13. package/dist/cjs/fetchers/javascript.js +1 -1
  14. package/dist/cjs/fetchers/javascript.js.map +1 -1
  15. package/dist/cjs/fetchers/redshift.d.ts.map +1 -1
  16. package/dist/cjs/fetchers/redshift.js +3 -1
  17. package/dist/cjs/fetchers/redshift.js.map +1 -1
  18. package/dist/cjs/fetchers/rest-api.d.ts.map +1 -1
  19. package/dist/cjs/fetchers/rest-api.js +2 -2
  20. package/dist/cjs/fetchers/rest-api.js.map +1 -1
  21. package/dist/cjs/fetchers/turso.d.ts.map +1 -1
  22. package/dist/cjs/fetchers/turso.js +1 -1
  23. package/dist/cjs/fetchers/turso.js.map +1 -1
  24. package/dist/cjs/pagination-plugin.d.ts.map +1 -1
  25. package/dist/cjs/pagination-plugin.js +280 -165
  26. package/dist/cjs/pagination-plugin.js.map +1 -1
  27. package/dist/cjs/tsconfig.tsbuildinfo +1 -1
  28. package/dist/esm/count-fetchers.d.ts.map +1 -1
  29. package/dist/esm/count-fetchers.js +1 -1
  30. package/dist/esm/count-fetchers.js.map +1 -1
  31. package/dist/esm/fetchers/clickhouse.d.ts.map +1 -1
  32. package/dist/esm/fetchers/clickhouse.js +1 -1
  33. package/dist/esm/fetchers/clickhouse.js.map +1 -1
  34. package/dist/esm/fetchers/firestore.d.ts.map +1 -1
  35. package/dist/esm/fetchers/firestore.js +1 -1
  36. package/dist/esm/fetchers/firestore.js.map +1 -1
  37. package/dist/esm/fetchers/javascript.d.ts.map +1 -1
  38. package/dist/esm/fetchers/javascript.js +1 -1
  39. package/dist/esm/fetchers/javascript.js.map +1 -1
  40. package/dist/esm/fetchers/redshift.d.ts.map +1 -1
  41. package/dist/esm/fetchers/redshift.js +3 -1
  42. package/dist/esm/fetchers/redshift.js.map +1 -1
  43. package/dist/esm/fetchers/rest-api.d.ts.map +1 -1
  44. package/dist/esm/fetchers/rest-api.js +2 -2
  45. package/dist/esm/fetchers/rest-api.js.map +1 -1
  46. package/dist/esm/fetchers/turso.d.ts.map +1 -1
  47. package/dist/esm/fetchers/turso.js +1 -1
  48. package/dist/esm/fetchers/turso.js.map +1 -1
  49. package/dist/esm/pagination-plugin.d.ts.map +1 -1
  50. package/dist/esm/pagination-plugin.js +280 -165
  51. package/dist/esm/pagination-plugin.js.map +1 -1
  52. package/dist/esm/tsconfig.tsbuildinfo +1 -1
  53. package/package.json +2 -2
  54. package/src/count-fetchers.ts +2 -1
  55. package/src/fetchers/clickhouse.ts +12 -6
  56. package/src/fetchers/firestore.ts +45 -13
  57. package/src/fetchers/javascript.ts +48 -13
  58. package/src/fetchers/redshift.ts +32 -9
  59. package/src/fetchers/rest-api.ts +68 -6
  60. package/src/fetchers/turso.ts +46 -16
  61. package/src/pagination-plugin.ts +440 -296
@@ -0,0 +1,185 @@
# Count API Route Fix

## Problem

When pages with pagination + search functionality were generated, the code included `useEffect` hooks that fetched from count API endpoints (e.g., `/api/cockroachdb-users-eab38133-count`), but these count API route files were not being created. This resulted in 404 errors when users searched.

### Affected Code

In the generated page `index.js`:
```javascript
useEffect(() => {
  if (skipCountFetchOnMount_pg_1.current) {
    skipCountFetchOnMount_pg_1.current = false
    return
  }
  fetch(
    `/api/cockroachdb-users-eab38133-count?${new URLSearchParams({
      query: paginationState_pg_1.debouncedQuery,
      queryColumns: JSON.stringify(['name']),
    })}`
  )
    .then((res) => res.json())
    .then((data) => {
      if (data && 'count' in data) {
        setPagination_pg_1_maxPages(
          data.count === 0 ? 0 : Math.ceil(data.count / 3)
        )
      }
    })
}, [paginationState_pg_1.debouncedQuery])
```

But `/api/cockroachdb-users-eab38133-count.js` did **not exist**.
## Root Cause

In `pagination-plugin.ts`, the `createAPIRoutesForPaginatedDataSources()` function:
```typescript
// OLD CODE (line 2354)
if (isComponent) { // ❌ Only created for components!
  // Create count API route
  extractedResources[`api/${countFileName}`] = { ... }
}
```

The count API route was only being created for **components**, but pages with search functionality also need it to refetch the count when the search query changes.
## Why Pages with Search Need Count Routes

### Different Scenarios:

1. **Pages with pagination ONLY (no search)**:
   - Initial count fetched in `getStaticProps`
   - Count is static; it doesn't change during user interaction
   - ✅ No count API route needed

2. **Pages with pagination + SEARCH**:
   - Initial count fetched in `getStaticProps` (for page 1, no search)
   - Count changes when the user searches
   - Need to refetch the count with search parameters (see the sketch after this list)
   - ❌ **NEEDS count API route** (was missing!)

3. **Components with pagination (with or without search)**:
   - No `getStaticProps`
   - Always fetch count client-side
   - ✅ **NEEDS count API route** (was already working)

4. **Search-only (no pagination)**:
   - No pagination, no maxPages calculation needed
   - ✅ No count route needed
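A minimal sketch of the client-side contract behind scenario 2, using the endpoint from the affected `useEffect` above and the `{ success, count, timestamp }` payload that the generated `getCount` handler returns (the helper name and the `perPage` argument are illustrative):

```typescript
// Illustrative helper: refetch the total count for the current search query
// and derive maxPages the same way the generated page does.
async function fetchMaxPages(query: string, perPage: number): Promise<number | undefined> {
  const params = new URLSearchParams({
    query,
    queryColumns: JSON.stringify(['name']),
  })
  const res = await fetch(`/api/cockroachdb-users-eab38133-count?${params}`)
  const data = await res.json() // expected: { success: true, count: <number>, timestamp: <ms> }
  if (data && 'count' in data) {
    return data.count === 0 ? 0 : Math.ceil(data.count / perPage)
  }
  return undefined
}
```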
## Solution

Changed the condition from "only components" to "components OR pages with search":

```typescript
// NEW CODE (lines 2344-2364)
const searchEnabledDataSources = new Set(
  paginationInfos
    .filter((info) => info.searchEnabled)
    .map((info) => info.dataSourceIdentifier)
)

const createdCountRoutes = new Set<string>()

// ... in traverseForDataSources:
const hasSearch = searchEnabledDataSources.has(renderProp)
const needsCountRoute = isComponent || hasSearch // ✅ Now checks both!

if (needsCountRoute) {
  const resourceDef = node.content.resourceDefinition
  if (resourceDef) {
    // ... create count route
    if (!createdCountRoutes.has(countFileName)) {
      extractedResources[`api/${countFileName}`] = {
        fileName: countFileName,
        fileType: FileType.JS,
        path: ['pages', 'api'],
        content: `import dataSource from '../../utils/data-sources/${fileName}'

export default dataSource.getCount
`,
      }
      createdCountRoutes.add(countFileName)
    }
  }
}
```
## Additional Improvements

1. **Deduplication**: Added a `createdCountRoutes` Set to prevent creating the same count route multiple times when the same data source is used with different perPage settings.

2. **Clear Logic**: The condition `needsCountRoute = isComponent || hasSearch` makes it explicit when count routes are needed.

## Generated Files

After the fix, for a page with search, the following files are generated:

```
pages/
  api/
    cockroachdb-users-eab38133.js        ✅ Main data API route
    cockroachdb-users-eab38133-count.js  ✅ Count API route (NOW CREATED!)

utils/
  data-sources/
    cockroachdb-users-eab38133.js        ✅ Data source module with fetchData, fetchCount, handler, getCount
```
+
131
+ ## Testing
132
+
133
+ Verified with the provided UIDL containing:
134
+ - Table with pagination + search
135
+ - Array mapper with pagination + search
136
+ - Both bound to the same CockroachDB data source
137
+
138
+ Results:
139
+ - ✅ Count API route created: `/api/cockroachdb-users-eab38133-count.js`
140
+ - ✅ File exports `dataSource.getCount` handler
141
+ - ✅ Search functionality works without 404 errors
142
+ - ✅ Count updates correctly when search query changes
143
+
144
+ ## Edge Cases Handled
145
+
146
+ 1. ✅ **Multiple data sources with search**: Each gets its own count route
147
+ 2. ✅ **Same data source, different perPage**: Count route created once, shared
148
+ 3. ✅ **Pages without search**: Count route not created (unnecessary)
149
+ 4. ✅ **Components**: Count route still created (backward compatible)
150
+ 5. ✅ **Search-only (no pagination)**: No count route created (correct)
## Files Modified

- `packages/teleport-plugin-next-data-source/src/pagination-plugin.ts`:
  - `createAPIRoutesForPaginatedDataSources()`: Updated condition to create count routes for pages with search
  - Added `searchEnabledDataSources` Set
  - Added `createdCountRoutes` Set for deduplication
  - Changed `if (isComponent)` to `if (needsCountRoute)`

## Regression Prevention

This fix aligns with the comment on line 353 of the same file:
```typescript
// Add useEffect to refetch count when search changes (for both pages and components)
```

The useEffect was already being created for both pages and components, but the count API route was only being created for components. Now both are consistent.
## Related Issues

This issue was introduced when search functionality was added to pages. The original pagination implementation (without search) correctly didn't create count routes for pages because:
- Pages got initial count in `getStaticProps`
- Count never changed during user interaction (no search)

When search was added, the requirement changed:
- Count now needs to be refetched when the search query changes
- But the count API route creation wasn't updated

## Future Considerations

If additional features are added that require dynamic count updates (e.g., filters, sorting), the same pattern should be followed (sketched after this list):
1. Create the useEffect to refetch count
2. Ensure the count API route is created
3. Add the feature to the `searchEnabledDataSources` check (or create a new check)
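For example, a hypothetical `filters` feature could extend the existing check like this; the `filtersEnabled` flag and `filterEnabledDataSources` name are assumptions, while the rest mirrors the solution code above:

```typescript
// Hypothetical extension: treat filter-enabled data sources like search-enabled ones
const filterEnabledDataSources = new Set(
  paginationInfos
    .filter((info) => info.filtersEnabled) // assumed flag
    .map((info) => info.dataSourceIdentifier)
)

const needsCountRoute =
  isComponent ||
  searchEnabledDataSources.has(renderProp) ||
  filterEnabledDataSources.has(renderProp)
```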
@@ -0,0 +1,96 @@
# Search State Bug Fix Summary

## Problem

When multiple data providers with search functionality were bound to the same data source in a project, all search inputs were incorrectly writing to the same state variable, causing search functionality to interfere between different list/table components.
## Root Causes

### 1. Fallback Matching Logic
The `modifySearchInputs` function had a problematic fallback:
```typescript
} else if (className && className.includes('search-input')) {
  // Fallback: match any input with 'search-input' in class
  addSearchInputHandlers(node, info)
}
```
This caused EVERY search input to match EVERY pagination info, with the last match overwriting previous handlers.

### 2. No Duplicate Prevention
The function didn't track which inputs had already been modified, allowing multiple matches to overwrite handlers.

### 3. Limited Detection Scope
The detection logic only looked at immediate siblings of DataProviders, missing search inputs in nested structures (e.g., table layouts where the search input is at a different nesting level).
## Solutions Implemented

### 1. Removed Fallback Logic
Changed from `forEach` to a `for` loop with a `break` statement to ensure only ONE match per input:
```typescript
for (let index = 0; index < detectedPaginations.length; index++) {
  const detected = detectedPaginations[index]
  const info = paginationInfos[index]

  if (className === detected.searchInputClass) {
    addSearchInputHandlers(node, info)
    modifiedInputs.add(node)
    break // Stop after first match
  }
}
```
### 2. Added Duplicate Tracking
Introduced a `Set` to track modified inputs:
```typescript
const modifiedInputs = new Set<any>()
// ...
if (className && !modifiedInputs.has(node)) {
  // Only process if not already modified
}
```
### 3. Improved Detection for Nested Structures
Enhanced the detection logic to walk up the JSX tree to find search inputs and pagination controls that aren't direct siblings:
```typescript
const findSearchAndPaginationInScope = (scopeNode: any, skipNode: any = null): void => {
  // Recursively search through children
}

let currentScope = parent
let depth = 0
const maxDepth = 5

while (currentScope && (!searchInputInfo || !paginationNodeInfo) && depth < maxDepth) {
  findSearchAndPaginationInScope(currentScope, depth === 0 ? dataProvider : null)
  currentScope = findParentNode(blockStatement, currentScope)
  depth++
}
```
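`findParentNode()` is listed under Files Modified but not shown in this summary; a minimal sketch of what such a helper could look like, assuming each node exposes its children as a `children` array (the actual implementation walks the generated JSX/AST structure and may differ):

```typescript
// Illustrative only: starting from `root`, find the node whose children
// contain `target` and return it, or null if `target` isn't in the tree.
const findParentNode = (root: any, target: any): any => {
  const children: any[] = Array.isArray(root?.children) ? root.children : []
  for (const child of children) {
    if (child === target) {
      return root
    }
    const found = findParentNode(child, target)
    if (found) {
      return found
    }
  }
  return null
}
```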
### 4. Applied Same Fix to Pagination Buttons
Applied the same pattern to `modifyPaginationButtons` for consistency and to prevent similar issues with button handlers (sketched below).
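The button-side change is not shown in this summary; a sketch of the same match-once pattern inside `modifyPaginationButtons`, assuming the detection result exposes a `paginationButtonClass` and a corresponding `addPaginationButtonHandlers` helper (both names are illustrative, and the fragment relies on the same surrounding variables as the search-input snippet above):

```typescript
const modifiedButtons = new Set<any>()

// ... for each candidate button node:
for (let index = 0; index < detectedPaginations.length; index++) {
  const detected = detectedPaginations[index]
  const info = paginationInfos[index]

  if (!modifiedButtons.has(node) && className === detected.paginationButtonClass) {
    addPaginationButtonHandlers(node, info)
    modifiedButtons.add(node)
    break // Stop after first match
  }
}
```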
## Edge Cases Handled

1. ✅ **Multiple data sources on same page**: Each gets unique state variables (e.g., `search_pg_0_query`, `search_pg_1_query`)
2. ✅ **Nested structures (tables)**: Detection walks up the tree to find search inputs
3. ✅ **Same data source, different components**: Each instance maintains independent state
4. ✅ **Mixed pagination + search**: Proper state management with combined `paginationState` object
5. ✅ **Search-only mappers**: Separate handling preserves functionality

## Testing

Verified with UIDL containing:
- Table with search and pagination
- Array mapper with search and pagination
- Both bound to the same CockroachDB data source

Result: Each search input now has its own state variable and handlers, functioning independently without interference.
## Files Modified

1. `packages/teleport-plugin-next-data-source/src/pagination-plugin.ts`
   - `modifySearchInputs()`: Removed fallback, added tracking, break on match
   - `modifyPaginationButtons()`: Applied same pattern for consistency
   - `detectPaginationsAndSearchFromJSX()`: Enhanced detection for nested structures
   - `findParentNode()`: New helper to walk up the tree
@@ -1 +1 @@
1
- {"version":3,"file":"count-fetchers.d.ts","sourceRoot":"","sources":["../../src/count-fetchers.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,4BAA4B,CAAA;AAC3D,OAAO,EAAE,8BAA8B,EAAE,MAAM,uBAAuB,CAAA;AACtE,OAAO,EAAE,yBAAyB,EAAE,MAAM,kBAAkB,CAAA;AAC5D,OAAO,EAAE,2BAA2B,EAAE,MAAM,oBAAoB,CAAA;AAChE,OAAO,EAAE,2BAA2B,EAAE,MAAM,oBAAoB,CAAA;AAChE,OAAO,EAAE,4BAA4B,EAAE,MAAM,qBAAqB,CAAA;AAClE,OAAO,EAAE,8BAA8B,EAAE,MAAM,uBAAuB,CAAA;AACtE,OAAO,EAAE,oCAAoC,EAAE,MAAM,8BAA8B,CAAA;AACnF,OAAO,EAAE,uBAAuB,EAAE,MAAM,qBAAqB,CAAA;AAE7D,OAAO,EACL,8BAA8B,EAC9B,yBAAyB,EACzB,2BAA2B,EAC3B,2BAA2B,EAC3B,4BAA4B,EAC5B,8BAA8B,EAC9B,oCAAoC,EACpC,uBAAuB,GACxB,CAAA;AAED,wBAAgB,oBAAoB,CAAC,UAAU,EAAE,cAAc,EAAE,SAAS,EAAE,MAAM,GAAG,MAAM,CA6E1F"}
1
+ {"version":3,"file":"count-fetchers.d.ts","sourceRoot":"","sources":["../../src/count-fetchers.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,4BAA4B,CAAA;AAC3D,OAAO,EAAE,8BAA8B,EAAE,MAAM,uBAAuB,CAAA;AACtE,OAAO,EAAE,yBAAyB,EAAE,MAAM,kBAAkB,CAAA;AAC5D,OAAO,EAAE,2BAA2B,EAAE,MAAM,oBAAoB,CAAA;AAChE,OAAO,EAAE,2BAA2B,EAAE,MAAM,oBAAoB,CAAA;AAChE,OAAO,EAAE,4BAA4B,EAAE,MAAM,qBAAqB,CAAA;AAClE,OAAO,EAAE,8BAA8B,EAAE,MAAM,uBAAuB,CAAA;AACtE,OAAO,EAAE,oCAAoC,EAAE,MAAM,8BAA8B,CAAA;AACnF,OAAO,EAAE,uBAAuB,EAAE,MAAM,qBAAqB,CAAA;AAE7D,OAAO,EACL,8BAA8B,EAC9B,yBAAyB,EACzB,2BAA2B,EAC3B,2BAA2B,EAC3B,4BAA4B,EAC5B,8BAA8B,EAC9B,oCAAoC,EACpC,uBAAuB,GACxB,CAAA;AAED,wBAAgB,oBAAoB,CAAC,UAAU,EAAE,cAAc,EAAE,SAAS,EAAE,MAAM,GAAG,MAAM,CA8E1F"}
@@ -39,7 +39,7 @@ function generateCountFetcher(dataSource, tableName) {
39
39
  case 'csv-file':
40
40
  return (0, csv_file_1.generateCSVCountFetcher)(config);
41
41
  default:
42
- return "\nasync function getCount(req, res) {\n try {\n const fakeReq = { query: {}, method: 'GET' }\n let result = null\n let statusCode = 200\n \n const fakeRes = {\n status: (code) => {\n statusCode = code\n return fakeRes\n },\n json: (data) => {\n result = data\n return fakeRes\n },\n }\n \n await handler(fakeReq, fakeRes)\n \n if (statusCode !== 200 || !result || !result.success) {\n return res.status(500).json({\n success: false,\n error: 'Failed to get data for counting',\n timestamp: Date.now()\n })\n }\n \n const count = Array.isArray(result.data) ? result.data.length : 0\n \n return res.status(200).json({\n success: true,\n count: count,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Error getting count:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to get count',\n timestamp: Date.now()\n })\n }\n}\n";
42
+ return "\nasync function getCount(req, res) {\n try {\n const { query, queryColumns, filters } = req.query\n const fakeReq = { query: { query, queryColumns, filters }, method: 'GET' }\n let result = null\n let statusCode = 200\n \n const fakeRes = {\n status: (code) => {\n statusCode = code\n return fakeRes\n },\n json: (data) => {\n result = data\n return fakeRes\n },\n }\n \n await handler(fakeReq, fakeRes)\n \n if (statusCode !== 200 || !result || !result.success) {\n return res.status(500).json({\n success: false,\n error: 'Failed to get data for counting',\n timestamp: Date.now()\n })\n }\n \n const count = Array.isArray(result.data) ? result.data.length : 0\n \n return res.status(200).json({\n success: true,\n count: count,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Error getting count:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to get count',\n timestamp: Date.now()\n })\n }\n}\n";
43
43
  }
44
44
  }
45
45
  exports.generateCountFetcher = generateCountFetcher;
@@ -1 +1 @@
1
- {"version":3,"file":"count-fetchers.js","sourceRoot":"","sources":["../../src/count-fetchers.ts"],"names":[],"mappings":";;;AACA,oDAAsE;AAUpE,+GAVO,2CAA8B,OAUP;AAThC,0CAA4D;AAU1D,0GAVO,iCAAyB,OAUP;AAT3B,8CAAgE;AAU9D,4GAVO,qCAA2B,OAUP;AAT7B,8CAAgE;AAU9D,4GAVO,qCAA2B,OAUP;AAT7B,gDAAkE;AAUhE,6GAVO,uCAA4B,OAUP;AAT9B,oDAAsE;AAUpE,+GAVO,2CAA8B,OAUP;AAThC,kEAAmF;AAUjF,qHAVO,wDAAoC,OAUP;AATtC,gDAA6D;AAU3D,wGAVO,kCAAuB,OAUP;AAGzB,SAAgB,oBAAoB,CAAC,UAA0B,EAAE,SAAiB;IACxE,IAAA,IAAI,GAAa,UAAU,KAAvB,EAAE,MAAM,GAAK,UAAU,OAAf,CAAe;IAEnC,QAAQ,IAAI,EAAE;QACZ,KAAK,YAAY,CAAC;QAClB,KAAK,aAAa;YAChB,OAAO,IAAA,2CAA8B,EAAC,MAAM,EAAE,SAAS,CAAC,CAAA;QAE1D,KAAK,OAAO,CAAC;QACb,KAAK,MAAM;YACT,OAAO,IAAA,iCAAyB,EAAC,MAAM,EAAE,SAAS,CAAC,CAAA;QAErD,KAAK,SAAS;YACZ,OAAO,IAAA,qCAA2B,EAAC,MAAM,EAAE,SAAS,CAAC,CAAA;QAEvD,KAAK,SAAS;YACZ,OAAO,IAAA,qCAA2B,EAAC,MAAM,EAAE,SAAS,CAAC,CAAA;QAEvD,KAAK,UAAU;YACb,OAAO,IAAA,uCAA4B,EAAC,MAAM,EAAE,SAAS,CAAC,CAAA;QAExD,KAAK,YAAY;YACf,OAAO,IAAA,2CAA8B,EAAC,MAAM,CAAC,CAAA;QAE/C,KAAK,mBAAmB;YACtB,OAAO,IAAA,wDAAoC,EAAC,MAAM,CAAC,CAAA;QAErD,KAAK,UAAU;YACb,OAAO,IAAA,kCAAuB,EAAC,MAAM,CAAC,CAAA;QAExC;YACE,OAAO,4hCA4CZ,CAAA;KACE;AACH,CAAC;AA7ED,oDA6EC"}
1
+ {"version":3,"file":"count-fetchers.js","sourceRoot":"","sources":["../../src/count-fetchers.ts"],"names":[],"mappings":";;;AACA,oDAAsE;AAUpE,+GAVO,2CAA8B,OAUP;AAThC,0CAA4D;AAU1D,0GAVO,iCAAyB,OAUP;AAT3B,8CAAgE;AAU9D,4GAVO,qCAA2B,OAUP;AAT7B,8CAAgE;AAU9D,4GAVO,qCAA2B,OAUP;AAT7B,gDAAkE;AAUhE,6GAVO,uCAA4B,OAUP;AAT9B,oDAAsE;AAUpE,+GAVO,2CAA8B,OAUP;AAThC,kEAAmF;AAUjF,qHAVO,wDAAoC,OAUP;AATtC,gDAA6D;AAU3D,wGAVO,kCAAuB,OAUP;AAGzB,SAAgB,oBAAoB,CAAC,UAA0B,EAAE,SAAiB;IACxE,IAAA,IAAI,GAAa,UAAU,KAAvB,EAAE,MAAM,GAAK,UAAU,OAAf,CAAe;IAEnC,QAAQ,IAAI,EAAE;QACZ,KAAK,YAAY,CAAC;QAClB,KAAK,aAAa;YAChB,OAAO,IAAA,2CAA8B,EAAC,MAAM,EAAE,SAAS,CAAC,CAAA;QAE1D,KAAK,OAAO,CAAC;QACb,KAAK,MAAM;YACT,OAAO,IAAA,iCAAyB,EAAC,MAAM,EAAE,SAAS,CAAC,CAAA;QAErD,KAAK,SAAS;YACZ,OAAO,IAAA,qCAA2B,EAAC,MAAM,EAAE,SAAS,CAAC,CAAA;QAEvD,KAAK,SAAS;YACZ,OAAO,IAAA,qCAA2B,EAAC,MAAM,EAAE,SAAS,CAAC,CAAA;QAEvD,KAAK,UAAU;YACb,OAAO,IAAA,uCAA4B,EAAC,MAAM,EAAE,SAAS,CAAC,CAAA;QAExD,KAAK,YAAY;YACf,OAAO,IAAA,2CAA8B,EAAC,MAAM,CAAC,CAAA;QAE/C,KAAK,mBAAmB;YACtB,OAAO,IAAA,wDAAoC,EAAC,MAAM,CAAC,CAAA;QAErD,KAAK,UAAU;YACb,OAAO,IAAA,kCAAuB,EAAC,MAAM,CAAC,CAAA;QAExC;YACE,OAAO,knCA6CZ,CAAA;KACE;AACH,CAAC;AA9ED,oDA8EC"}
@@ -1 +1 @@
1
- {"version":3,"file":"clickhouse.d.ts","sourceRoot":"","sources":["../../../src/fetchers/clickhouse.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,wBAAwB,WAC3B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAkBpC,CAAA;AAQD,eAAO,MAAM,yBAAyB,WAC5B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MA6FF,CAAA"}
1
+ {"version":3,"file":"clickhouse.d.ts","sourceRoot":"","sources":["../../../src/fetchers/clickhouse.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,wBAAwB,WAC3B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAkBpC,CAAA;AAQD,eAAO,MAAM,yBAAyB,WAC5B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAmGF,CAAA"}
@@ -23,7 +23,7 @@ var generateClickHouseFetcher = function (config, tableName) {
23
23
  var url = clickConfig.url;
24
24
  var username = clickConfig.username;
25
25
  var password = clickConfig.password;
26
- return "import { createClient } from '@clickhouse/client'\n\nlet client = null\n\nconst getClient = () => {\n if (client) return client\n \n client = createClient({\n url: ".concat(JSON.stringify(url), ",\n username: ").concat(JSON.stringify(username), ",\n password: ").concat((0, utils_1.replaceSecretReference)(password), "\n })\n \n return client\n}\n\nexport default async function handler(req, res) {\n try {\n const client = getClient()\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n \n if (query && queryColumns) {\n const columns = JSON.parse(queryColumns)\n const searchConditions = columns.map(\n (col) => `positionCaseInsensitive(toString(${col}), '${query}') > 0`\n )\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const formattedValues = value\n .map((v) => (typeof v === 'string' ? `'${v}'` : v))\n .join(', ')\n conditions.push(`${key} IN (${formattedValues})`)\n } else if (typeof value === 'string') {\n conditions.push(`${key} = '${value}'`)\n } else {\n conditions.push(`${key} = ${value}`)\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await client.query({ query: sql })\n const resultResponse = await result.json()\n const safeData = JSON.parse(JSON.stringify(resultResponse.data))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('ClickHouse fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
26
+ return "import { createClient } from '@clickhouse/client'\n\nlet client = null\n\nconst getClient = () => {\n if (client) return client\n \n client = createClient({\n url: ".concat(JSON.stringify(url), ",\n username: ").concat(JSON.stringify(username), ",\n password: ").concat((0, utils_1.replaceSecretReference)(password), "\n })\n \n return client\n}\n\nexport default async function handler(req, res) {\n try {\n const client = getClient()\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n \n if (query) {\n if (queryColumns) {\n const columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n const searchConditions = columns.map(\n (col) => `positionCaseInsensitive(toString(${col}), '${query}') > 0`\n )\n conditions.push(`(${searchConditions.join(' OR ')})`)\n } else {\n // Note: Without queryColumns, ClickHouse can't search all columns efficiently\n // Users should provide queryColumns for optimal search performance\n console.warn('Search query provided without queryColumns - search may not work as expected')\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const formattedValues = value\n .map((v) => (typeof v === 'string' ? `'${v}'` : v))\n .join(', ')\n conditions.push(`${key} IN (${formattedValues})`)\n } else if (typeof value === 'string') {\n conditions.push(`${key} = '${value}'`)\n } else {\n conditions.push(`${key} = ${value}`)\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await client.query({ query: sql })\n const resultResponse = await result.json()\n const safeData = JSON.parse(JSON.stringify(resultResponse.data))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('ClickHouse fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
27
27
  };
28
28
  exports.generateClickHouseFetcher = generateClickHouseFetcher;
29
29
  //# sourceMappingURL=clickhouse.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"clickhouse.js","sourceRoot":"","sources":["../../../src/fetchers/clickhouse.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAE1C,IAAM,wBAAwB,GAAG,UACtC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,GAAG,IAAI,OAAO,MAAM,CAAC,GAAG,KAAK,QAAQ,EAAE;QACjD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,4BAA4B,EAAE,CAAA;KAC/D;IAED,IAAI,CAAC,MAAM,CAAC,QAAQ,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QAC3D,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,iCAAiC,EAAE,CAAA;KACpE;IAED,IAAI,CAAC,MAAM,CAAC,QAAQ,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QAC3D,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,iCAAiC,EAAE,CAAA;KACpE;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AApBY,QAAA,wBAAwB,4BAoBpC;AAQM,IAAM,yBAAyB,GAAG,UACvC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAA0B,CAAA;IAC9C,IAAM,GAAG,GAAG,WAAW,CAAC,GAAG,CAAA;IAC3B,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IACrC,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IAErC,OAAO,oLAQE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,8BACd,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,8BACxB,IAAA,8BAAsB,EAAC,QAAQ,CAAC,0oCAqChB,SAAS,mkCAuCxC,CAAA;AACD,CAAC,CAAA;AAhGY,QAAA,yBAAyB,6BAgGrC"}
1
+ {"version":3,"file":"clickhouse.js","sourceRoot":"","sources":["../../../src/fetchers/clickhouse.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAE1C,IAAM,wBAAwB,GAAG,UACtC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,GAAG,IAAI,OAAO,MAAM,CAAC,GAAG,KAAK,QAAQ,EAAE;QACjD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,4BAA4B,EAAE,CAAA;KAC/D;IAED,IAAI,CAAC,MAAM,CAAC,QAAQ,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QAC3D,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,iCAAiC,EAAE,CAAA;KACpE;IAED,IAAI,CAAC,MAAM,CAAC,QAAQ,IAAI,OAAO,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QAC3D,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,iCAAiC,EAAE,CAAA;KACpE;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AApBY,QAAA,wBAAwB,4BAoBpC;AAQM,IAAM,yBAAyB,GAAG,UACvC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAA0B,CAAA;IAC9C,IAAM,GAAG,GAAG,WAAW,CAAC,GAAG,CAAA;IAC3B,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IACrC,IAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAA;IAErC,OAAO,oLAQE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,8BACd,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,8BACxB,IAAA,8BAAsB,EAAC,QAAQ,CAAC,siDA2ChB,SAAS,mkCAuCxC,CAAA;AACD,CAAC,CAAA;AAtGY,QAAA,yBAAyB,6BAsGrC"}
@@ -1 +1 @@
1
- {"version":3,"file":"firestore.d.ts","sourceRoot":"","sources":["../../../src/fetchers/firestore.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,uBAAuB,WAC1B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAyBpC,CAAA;AAOD,eAAO,MAAM,wBAAwB,WAC3B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAkGF,CAAA"}
1
+ {"version":3,"file":"firestore.d.ts","sourceRoot":"","sources":["../../../src/fetchers/firestore.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,uBAAuB,WAC1B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAyBpC,CAAA;AAOD,eAAO,MAAM,wBAAwB,WAC3B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAkIF,CAAA"}
@@ -29,7 +29,7 @@ exports.validateFirestoreConfig = validateFirestoreConfig;
29
29
  var generateFirestoreFetcher = function (config, tableName) {
30
30
  var firestoreConfig = config;
31
31
  var serviceAccount = firestoreConfig.serviceAccount;
32
- return "import * as admin from 'firebase-admin'\n\nlet firestore = null\n\nconst getFirestore = () => {\n if (firestore) return firestore\n \n const rawServiceAccount = ".concat((0, utils_1.replaceSecretReference)(serviceAccount), "\n let serviceAccount\n\n try {\n serviceAccount = JSON.parse(rawServiceAccount)\n } catch (error) {\n throw new Error('Invalid Firestore service account JSON: ' + error.message)\n }\n \n if (!admin.apps.length) {\n admin.initializeApp({\n credential: admin.credential.cert(serviceAccount)\n })\n }\n \n firestore = admin.firestore()\n return firestore\n}\n\nexport default async function handler(req, res) {\n try {\n const firestore = getFirestore()\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n let queryRef = firestore.collection('").concat(tableName, "')\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n queryRef = queryRef.where(key, 'in', value)\n } else {\n queryRef = queryRef.where(key, '==', value)\n }\n })\n }\n \n if (query && queryColumns) {\n const columns = JSON.parse(queryColumns)\n for (const column of columns) {\n queryRef = queryRef\n .where(column, '>=', query)\n .where(column, '<=', query + '\\uf8ff')\n }\n }\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? 'desc' : 'asc'\n queryRef = queryRef.orderBy(sortBy, sortOrderValue)\n }\n \n const limitValue = limit || perPage\n if (limitValue) {\n queryRef = queryRef.limit(parseInt(limitValue))\n }\n \n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage && parseInt(page) > 1 ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n if (offsetValue !== undefined) {\n queryRef = queryRef.offset(offsetValue)\n }\n \n const snapshot = await queryRef.get()\n const documents = []\n snapshot.forEach((doc) => {\n documents.push({\n id: doc.id,\n ...doc.data()\n })\n })\n \n const safeData = JSON.parse(JSON.stringify(documents))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Firestore fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
32
+ return "import * as admin from 'firebase-admin'\n\nlet firestore = null\n\nconst getFirestore = () => {\n if (firestore) return firestore\n \n const rawServiceAccount = ".concat((0, utils_1.replaceSecretReference)(serviceAccount), "\n let serviceAccount\n\n try {\n serviceAccount = JSON.parse(rawServiceAccount)\n } catch (error) {\n throw new Error('Invalid Firestore service account JSON: ' + error.message)\n }\n \n if (!admin.apps.length) {\n admin.initializeApp({\n credential: admin.credential.cert(serviceAccount)\n })\n }\n \n firestore = admin.firestore()\n return firestore\n}\n\nexport default async function handler(req, res) {\n try {\n const firestore = getFirestore()\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n let queryRef = firestore.collection('").concat(tableName, "')\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n queryRef = queryRef.where(key, 'in', value)\n } else {\n queryRef = queryRef.where(key, '==', value)\n }\n })\n }\n \n let usePostFiltering = false\n \n if (query) {\n if (queryColumns) {\n const columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n for (const column of columns) {\n queryRef = queryRef\n .where(column, '>=', query)\n .where(column, '<=', query + '\\uf8ff')\n }\n } else {\n // Firestore doesn't support full-text search without queryColumns\n // We'll fetch all data and filter in JavaScript\n usePostFiltering = true\n }\n }\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? 'desc' : 'asc'\n queryRef = queryRef.orderBy(sortBy, sortOrderValue)\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage && parseInt(page) > 1 ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n // Only apply pagination at query level if not post-filtering\n if (!usePostFiltering) {\n if (limitValue) {\n queryRef = queryRef.limit(parseInt(limitValue))\n }\n if (offsetValue !== undefined) {\n queryRef = queryRef.offset(offsetValue)\n }\n }\n \n const snapshot = await queryRef.get()\n let documents = []\n snapshot.forEach((doc) => {\n documents.push({\n id: doc.id,\n ...doc.data()\n })\n })\n \n // Apply post-filtering if needed\n if (usePostFiltering && query) {\n const searchQuery = query.toLowerCase()\n documents = documents.filter((item) => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n \n // Apply pagination after filtering\n if (limitValue) {\n const start = offsetValue || 0\n documents = documents.slice(start, start + parseInt(limitValue))\n } else if (offsetValue) {\n documents = documents.slice(offsetValue)\n }\n }\n \n const safeData = JSON.parse(JSON.stringify(documents))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Firestore fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
33
33
  };
34
34
  exports.generateFirestoreFetcher = generateFirestoreFetcher;
35
35
  //# sourceMappingURL=firestore.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"firestore.js","sourceRoot":"","sources":["../../../src/fetchers/firestore.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAE1C,IAAM,uBAAuB,GAAG,UACrC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,cAAc,IAAI,OAAO,MAAM,CAAC,cAAc,KAAK,QAAQ,EAAE;QACvE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,4CAA4C,EAAE,CAAA;KAC/E;IAED,IAAM,cAAc,GAAG,MAAM,CAAC,cAAwB,CAAA;IAEtD,iGAAiG;IACjG,sEAAsE;IACtE,IAAI,CAAC,cAAc,CAAC,UAAU,CAAC,qBAAqB,CAAC,EAAE;QACrD,IAAI;YACF,IAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,CAAA;YACzC,IAAI,CAAC,MAAM,CAAC,UAAU,IAAI,CAAC,MAAM,CAAC,WAAW,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;gBACrE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,kDAAkD,EAAE,CAAA;aACrF;SACF;QAAC,WAAM;YACN,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,oCAAoC,EAAE,CAAA;SACvE;KACF;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AA3BY,QAAA,uBAAuB,2BA2BnC;AAOM,IAAM,wBAAwB,GAAG,UACtC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,eAAe,GAAG,MAAyB,CAAA;IACjD,IAAM,cAAc,GAAG,eAAe,CAAC,cAAc,CAAA;IAErD,OAAO,8KAOqB,IAAA,8BAAsB,EAAC,cAAc,CAAC,yoBAwBzB,SAAS,wvDA8DnD,CAAA;AACD,CAAC,CAAA;AArGY,QAAA,wBAAwB,4BAqGpC"}
1
+ {"version":3,"file":"firestore.js","sourceRoot":"","sources":["../../../src/fetchers/firestore.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAE1C,IAAM,uBAAuB,GAAG,UACrC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,cAAc,IAAI,OAAO,MAAM,CAAC,cAAc,KAAK,QAAQ,EAAE;QACvE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,4CAA4C,EAAE,CAAA;KAC/E;IAED,IAAM,cAAc,GAAG,MAAM,CAAC,cAAwB,CAAA;IAEtD,iGAAiG;IACjG,sEAAsE;IACtE,IAAI,CAAC,cAAc,CAAC,UAAU,CAAC,qBAAqB,CAAC,EAAE;QACrD,IAAI;YACF,IAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,CAAA;YACzC,IAAI,CAAC,MAAM,CAAC,UAAU,IAAI,CAAC,MAAM,CAAC,WAAW,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;gBACrE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,kDAAkD,EAAE,CAAA;aACrF;SACF;QAAC,WAAM;YACN,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,oCAAoC,EAAE,CAAA;SACvE;KACF;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AA3BY,QAAA,uBAAuB,2BA2BnC;AAOM,IAAM,wBAAwB,GAAG,UACtC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,eAAe,GAAG,MAAyB,CAAA;IACjD,IAAM,cAAc,GAAG,eAAe,CAAC,cAAc,CAAA;IAErD,OAAO,8KAOqB,IAAA,8BAAsB,EAAC,cAAc,CAAC,yoBAwBzB,SAAS,o2FA8FnD,CAAA;AACD,CAAC,CAAA;AArIY,QAAA,wBAAwB,4BAqIpC"}
@@ -1 +1 @@
1
- {"version":3,"file":"javascript.d.ts","sourceRoot":"","sources":["../../../src/fetchers/javascript.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,wBAAwB,WAC3B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CA2BpC,CAAA;AAMD,eAAO,MAAM,yBAAyB,WAAY,OAAO,MAAM,EAAE,OAAO,CAAC,KAAG,MAgE3E,CAAA;AAGD,eAAO,MAAM,8BAA8B,YAAa,GAAG,KAAG,MA+C7D,CAAA"}
1
+ {"version":3,"file":"javascript.d.ts","sourceRoot":"","sources":["../../../src/fetchers/javascript.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,wBAAwB,WAC3B,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CA2BpC,CAAA;AAMD,eAAO,MAAM,yBAAyB,WAAY,OAAO,MAAM,EAAE,OAAO,CAAC,KAAG,MAmG3E,CAAA;AAGD,eAAO,MAAM,8BAA8B,YAAa,GAAG,KAAG,MA+C7D,CAAA"}
@@ -29,7 +29,7 @@ var validateJavaScriptConfig = function (config) {
29
29
  exports.validateJavaScriptConfig = validateJavaScriptConfig;
30
30
  var generateJavaScriptFetcher = function (config) {
31
31
  var jsConfig = config;
32
- return "export default async function handler(req, res) {\n try {\n const { limit, offset, page, perPage, query, queryColumns } = req.query\n \n const code = ".concat(JSON.stringify(jsConfig.code), "\n const executeCode = new Function('return ' + code)\n let data = executeCode()\n \n if (Array.isArray(data)) {\n if (query) {\n const searchQuery = query.toLowerCase()\n \n if (queryColumns) {\n // Search specific columns\n const columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n data = data.filter(item => {\n return columns.some(col => {\n const value = item[col]\n if (value === null || value === undefined) return false\n return String(value).toLowerCase().includes(searchQuery)\n })\n })\n } else {\n // Search across all fields by stringifying the entire record\n data = data.filter(item => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n }\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : 0)\n \n if (limitValue) {\n data = data.slice(offsetValue, offsetValue + parseInt(limitValue))\n } else if (offsetValue > 0) {\n data = data.slice(offsetValue)\n }\n }\n \n const safeData = JSON.parse(JSON.stringify(data))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('JavaScript execution error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to execute code',\n timestamp: Date.now()\n })\n }\n}\n");
32
+ return "export default async function handler(req, res) {\n try {\n const { limit, offset, page, perPage, query, queryColumns, sortBy, sortOrder, filters } = req.query\n \n const code = ".concat(JSON.stringify(jsConfig.code), "\n const executeCode = new Function('return ' + code)\n let data = executeCode()\n \n if (Array.isArray(data)) {\n // 1. Apply search filter\n if (query && query.trim()) {\n const searchQuery = query.toLowerCase()\n \n if (queryColumns) {\n try {\n const columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n data = data.filter(item => {\n return columns.some(col => {\n const value = item[col]\n if (value === null || value === undefined) return false\n return String(value).toLowerCase().includes(searchQuery)\n })\n })\n } catch (err) {\n console.error('Error parsing queryColumns:', err)\n }\n } else {\n data = data.filter(item => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n }\n }\n \n // 2. Apply custom filters\n if (filters) {\n try {\n const parsedFilters = typeof filters === 'string' ? JSON.parse(filters) : filters\n data = data.filter((item) => {\n return Object.entries(parsedFilters).every(([key, value]) => {\n if (Array.isArray(value)) {\n return value.includes(item[key])\n }\n return item[key] === value\n })\n })\n } catch (err) {\n console.error('Error parsing filters:', err)\n }\n }\n \n // 3. Apply sorting\n if (sortBy && sortBy.trim()) {\n data.sort((a, b) => {\n const aVal = a[sortBy]\n const bVal = b[sortBy]\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n if (aVal < bVal) return -sortOrderValue\n if (aVal > bVal) return sortOrderValue\n return 0\n })\n }\n \n // 4. Apply pagination\n const limitValue = limit || perPage\n const pageValue = page ? Math.max(1, parseInt(page)) : undefined\n const offsetValue = offset !== undefined ? Math.max(0, parseInt(offset)) : (pageValue && perPage ? (pageValue - 1) * Math.max(1, parseInt(perPage)) : 0)\n \n if (limitValue) {\n const limitInt = Math.max(1, parseInt(limitValue))\n data = data.slice(offsetValue, offsetValue + limitInt)\n } else if (offsetValue > 0) {\n data = data.slice(offsetValue)\n }\n }\n \n const safeData = JSON.parse(JSON.stringify(data))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('JavaScript execution error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to execute code',\n timestamp: Date.now()\n })\n }\n}\n");
33
33
  };
34
34
  exports.generateJavaScriptFetcher = generateJavaScriptFetcher;
35
35
  // tslint:disable-next-line:variable-name
@@ -1 +1 @@
1
- {"version":3,"file":"javascript.js","sourceRoot":"","sources":["../../../src/fetchers/javascript.ts"],"names":[],"mappings":";;;AAAO,IAAM,wBAAwB,GAAG,UACtC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,IAAI,IAAI,OAAO,MAAM,CAAC,IAAI,KAAK,QAAQ,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;QAChF,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,6BAA6B,EAAE,CAAA;KAChE;IAED,IAAM,iBAAiB,GAAG;QACxB,eAAe;QACf,YAAY;QACZ,YAAY;QACZ,gBAAgB;QAChB,YAAY;QACZ,WAAW;QACX,cAAc;KACf,CAAA;IAED,KAAsB,UAAiB,EAAjB,uCAAiB,EAAjB,+BAAiB,EAAjB,IAAiB,EAAE;QAApC,IAAM,OAAO,0BAAA;QAChB,IAAI,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;YAC7B,OAAO,CAAC,IAAI,CAAC,gFAAgF,CAAC,CAAA;YAC9F,MAAK;SACN;KACF;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AA7BY,QAAA,wBAAwB,4BA6BpC;AAMM,IAAM,yBAAyB,GAAG,UAAC,MAA+B;IACvE,IAAM,QAAQ,GAAG,MAA0B,CAAA;IAC3C,OAAO,0KAIU,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,m3DAyD/C,CAAA;AACD,CAAC,CAAA;AAhEY,QAAA,yBAAyB,6BAgErC;AAED,yCAAyC;AAClC,IAAM,8BAA8B,GAAG,UAAC,OAAY;IACzD,OAAO,gmCA6CR,CAAA;AACD,CAAC,CAAA;AA/CY,QAAA,8BAA8B,kCA+C1C"}
1
+ {"version":3,"file":"javascript.js","sourceRoot":"","sources":["../../../src/fetchers/javascript.ts"],"names":[],"mappings":";;;AAAO,IAAM,wBAAwB,GAAG,UACtC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,IAAI,IAAI,OAAO,MAAM,CAAC,IAAI,KAAK,QAAQ,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;QAChF,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,6BAA6B,EAAE,CAAA;KAChE;IAED,IAAM,iBAAiB,GAAG;QACxB,eAAe;QACf,YAAY;QACZ,YAAY;QACZ,gBAAgB;QAChB,YAAY;QACZ,WAAW;QACX,cAAc;KACf,CAAA;IAED,KAAsB,UAAiB,EAAjB,uCAAiB,EAAjB,+BAAiB,EAAjB,IAAiB,EAAE;QAApC,IAAM,OAAO,0BAAA;QAChB,IAAI,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;YAC7B,OAAO,CAAC,IAAI,CAAC,gFAAgF,CAAC,CAAA;YAC9F,MAAK;SACN;KACF;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AA7BY,QAAA,wBAAwB,4BA6BpC;AAMM,IAAM,yBAAyB,GAAG,UAAC,MAA+B;IACvE,IAAM,QAAQ,GAAG,MAA0B,CAAA;IAC3C,OAAO,sMAIU,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,mjGA4F/C,CAAA;AACD,CAAC,CAAA;AAnGY,QAAA,yBAAyB,6BAmGrC;AAED,yCAAyC;AAClC,IAAM,8BAA8B,GAAG,UAAC,OAAY;IACzD,OAAO,gmCA6CR,CAAA;AACD,CAAC,CAAA;AA/CY,QAAA,8BAA8B,kCA+C1C"}
@@ -1 +1 @@
1
- {"version":3,"file":"redshift.d.ts","sourceRoot":"","sources":["../../../src/fetchers/redshift.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,uBAAuB,WAC1B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAyHF,CAAA"}
1
+ {"version":3,"file":"redshift.d.ts","sourceRoot":"","sources":["../../../src/fetchers/redshift.ts"],"names":[],"mappings":"AAaA,eAAO,MAAM,uBAAuB,WAC1B,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAgJF,CAAA"}
@@ -18,7 +18,9 @@ var generateRedshiftFetcher = function (config, tableName) {
18
18
  : sslConfig
19
19
  ? "{\n ".concat(sslConfig.ca ? "ca: ".concat((0, utils_1.replaceSecretReference)(sslConfig.ca), ",") : '', "\n ").concat(sslConfig.cert ? "cert: ".concat((0, utils_1.replaceSecretReference)(sslConfig.cert), ",") : '', "\n ").concat(sslConfig.key ? "key: ".concat((0, utils_1.replaceSecretReference)(sslConfig.key), ",") : '', "\n rejectUnauthorized: ").concat(sslConfig.rejectUnauthorized !== false, "\n }")
20
20
  : '{ rejectUnauthorized: false }' // Default to SSL with no cert verification for Redshift
21
- , "\n })\n \n return pool\n}\n\nexport default async function handler(req, res) {\n try {\n const pool = getPool()\n ").concat(schema ? "await pool.query('SET search_path TO ".concat(schema, "')") : '', "\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n let paramIndex = 1\n \n if (query && queryColumns) {\n const columns = JSON.parse(queryColumns)\n const searchConditions = columns.map((col) => {\n const condition = `${col}::text ILIKE $${paramIndex}`\n paramIndex++\n return condition\n })\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => `$${paramIndex++}`)\n queryParams.push(...value)\n conditions.push(`${key} IN (${placeholders.join(', ')})`)\n } else {\n conditions.push(`${key} = $${paramIndex}`)\n queryParams.push(value)\n paramIndex++\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await pool.query(sql, queryParams)\n const rows = Array.isArray(result?.rows) ? result.rows : []\n const plainRows = rows.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Redshift fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
21
+ , "\n })\n \n return pool\n}\n\nexport default async function handler(req, res) {\n try {\n const pool = getPool()\n ").concat(schema ? "await pool.query('SET search_path TO ".concat(schema, "')") : '', "\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const conditions = []\n const queryParams = []\n let paramIndex = 1\n \n if (query) {\n let columns = []\n \n if (queryColumns) {\n columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n } else {\n // Fallback: Get all columns from information_schema\n try {\n const schemaQuery = 'SELECT column_name FROM information_schema.columns WHERE table_name = $1' + \n ").concat(schema ? "' AND table_schema = $2'" : "''", " + \n ' ORDER BY ordinal_position'\n const schemaParams = ").concat(schema
22
+ ? "[".concat(JSON.stringify(tableName), ", ").concat(JSON.stringify(schema), "]")
23
+ : "[".concat(JSON.stringify(tableName), "]"), "\n const schemaResult = await pool.query(schemaQuery, schemaParams)\n columns = schemaResult.rows.map(row => row.column_name)\n } catch (schemaError) {\n console.warn('Failed to fetch column names from information_schema:', schemaError.message)\n }\n }\n \n if (columns.length > 0) {\n const searchConditions = columns.map((col) => {\n const condition = `${col}::text ILIKE $${paramIndex}`\n paramIndex++\n return condition\n })\n columns.forEach(() => queryParams.push(`%${query}%`))\n conditions.push(`(${searchConditions.join(' OR ')})`)\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => `$${paramIndex++}`)\n queryParams.push(...value)\n conditions.push(`${key} IN (${placeholders.join(', ')})`)\n } else {\n conditions.push(`${key} = $${paramIndex}`)\n queryParams.push(value)\n paramIndex++\n }\n })\n }\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n \n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`\n }\n \n if (sortBy) {\n sql += ` ORDER BY ${sortBy} ${sortOrder?.toUpperCase() || 'ASC'}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ${limitValue}`\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ${offsetValue}`\n }\n \n const result = await pool.query(sql, queryParams)\n const rows = Array.isArray(result?.rows) ? result.rows : []\n const plainRows = rows.map((row) =>\n row && typeof row.toJSON === 'function' ? row.toJSON() : row\n )\n const safeData = JSON.parse(JSON.stringify(plainRows))\n\n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Redshift fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
22
24
  };
23
25
  exports.generateRedshiftFetcher = generateRedshiftFetcher;
24
26
  //# sourceMappingURL=redshift.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"redshift.js","sourceRoot":"","sources":["../../../src/fetchers/redshift.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAa1C,IAAM,uBAAuB,GAAG,UACrC,MAA+B,EAC/B,SAAiB;;IAEjB,IAAM,cAAc,GAAG,MAAwB,CAAA;IAC/C,IAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAA;IAChC,IAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAA;IAChC,IAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAA;IAChC,IAAM,QAAQ,GAAG,cAAc,CAAC,QAAQ,CAAA;IACxC,IAAM,QAAQ,GAAG,cAAc,CAAC,QAAQ,CAAA;IACxC,IAAM,GAAG,GAAG,cAAc,CAAC,GAAG,CAAA;IAC9B,IAAM,SAAS,GAAG,cAAc,CAAC,SAAS,CAAA;IAC1C,IAAM,MAAM,GAAG,MAAA,cAAc,CAAC,OAAO,0CAAE,MAAM,CAAA;IAE7C,OAAO,+IAQG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,0BACpB,IAAI,IAAI,IAAI,0BACZ,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,8BAChB,IAAA,8BAAsB,EAAC,QAAQ,CAAC,8BAChC,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,yBAElC,GAAG,KAAK,KAAK;QACX,CAAC,CAAC,+BAA+B;QACjC,CAAC,CAAC,SAAS;YACX,CAAC,CAAC,mBACF,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBAClE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,SAAS,CAAC,IAAI,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBACxE,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,SAAS,CAAC,GAAG,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,yCACjD,SAAS,CAAC,kBAAkB,KAAK,KAAK,YAC5D;YACE,CAAC,CAAC,+BAA+B,CAAC,wDAAwD;6IAU5F,MAAM,CAAC,CAAC,CAAC,+CAAwC,MAAM,OAAI,CAAC,CAAC,CAAC,EAAE,8nCAkCtC,SAAS,4rCA0CxC,CAAA;AACD,CAAC,CAAA;AA5HY,QAAA,uBAAuB,2BA4HnC"}
1
+ {"version":3,"file":"redshift.js","sourceRoot":"","sources":["../../../src/fetchers/redshift.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAa1C,IAAM,uBAAuB,GAAG,UACrC,MAA+B,EAC/B,SAAiB;;IAEjB,IAAM,cAAc,GAAG,MAAwB,CAAA;IAC/C,IAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAA;IAChC,IAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAA;IAChC,IAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAA;IAChC,IAAM,QAAQ,GAAG,cAAc,CAAC,QAAQ,CAAA;IACxC,IAAM,QAAQ,GAAG,cAAc,CAAC,QAAQ,CAAA;IACxC,IAAM,GAAG,GAAG,cAAc,CAAC,GAAG,CAAA;IAC9B,IAAM,SAAS,GAAG,cAAc,CAAC,SAAS,CAAA;IAC1C,IAAM,MAAM,GAAG,MAAA,cAAc,CAAC,OAAO,0CAAE,MAAM,CAAA;IAE7C,OAAO,+IAQG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,0BACpB,IAAI,IAAI,IAAI,0BACZ,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,8BAChB,IAAA,8BAAsB,EAAC,QAAQ,CAAC,8BAChC,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,yBAElC,GAAG,KAAK,KAAK;QACX,CAAC,CAAC,+BAA+B;QACjC,CAAC,CAAC,SAAS;YACX,CAAC,CAAC,mBACF,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,cAAO,IAAA,8BAAsB,EAAC,SAAS,CAAC,EAAE,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBAClE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,gBAAS,IAAA,8BAAsB,EAAC,SAAS,CAAC,IAAI,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,qBACxE,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,eAAQ,IAAA,8BAAsB,EAAC,SAAS,CAAC,GAAG,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,yCACjD,SAAS,CAAC,kBAAkB,KAAK,KAAK,YAC5D;YACE,CAAC,CAAC,+BAA+B,CAAC,wDAAwD;6IAU5F,MAAM,CAAC,CAAC,CAAC,+CAAwC,MAAM,OAAI,CAAC,CAAC,CAAC,EAAE,2oBAiBxD,MAAM,CAAC,CAAC,CAAC,0BAA0B,CAAC,CAAC,CAAC,IAAI,2FAG5C,MAAM;QACJ,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,eAAK,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAG;QAC7D,CAAC,CAAC,WAAI,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,MAAG,4sCAmChB,SAAS,4rCA0CxC,CAAA;AACD,CAAC,CAAA;AAnJY,QAAA,uBAAuB,2BAmJnC"}
@@ -1 +1 @@
1
- {"version":3,"file":"rest-api.d.ts","sourceRoot":"","sources":["../../../src/fetchers/rest-api.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,qBAAqB,WACxB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CA0BpC,CAAA;AA2CD,eAAO,MAAM,sBAAsB,WAAY,OAAO,MAAM,EAAE,OAAO,CAAC,KAAG,MA4ExE,CAAA"}
1
+ {"version":3,"file":"rest-api.d.ts","sourceRoot":"","sources":["../../../src/fetchers/rest-api.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,qBAAqB,WACxB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CA0BpC,CAAA;AA2CD,eAAO,MAAM,sBAAsB,WAAY,OAAO,MAAM,EAAE,OAAO,CAAC,KAAG,MA0IxE,CAAA"}
@@ -48,11 +48,11 @@ var generateAuthCode = function (authorization) {
48
48
  var generateRESTAPIFetcher = function (config) {
49
49
  var restConfig = config;
50
50
  var authCode = generateAuthCode(restConfig.authorization || {});
51
- return "import fetch from 'node-fetch'\n\nexport default async function handler(req, res) {\n try {\n const { offset, limit } = req.query\n \n const url = ".concat(JSON.stringify(restConfig.url), "\n const method = ").concat(JSON.stringify(restConfig.method || 'GET'), "\n \n const headers = ").concat(JSON.stringify(restConfig.headers || {}), "\n ").concat(authCode, "\n \n const options = {\n method,\n headers\n }\n \n ").concat(restConfig.method === 'POST' || restConfig.method === 'PUT' || restConfig.method === 'PATCH'
+ return "import fetch from 'node-fetch'\n\nexport default async function handler(req, res) {\n try {\n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n const url = ".concat(JSON.stringify(restConfig.url), "\n const method = ").concat(JSON.stringify(restConfig.method || 'GET'), "\n \n const headers = ").concat(JSON.stringify(restConfig.headers || {}), "\n ").concat(authCode, "\n \n const options = {\n method,\n headers\n }\n \n ").concat(restConfig.method === 'POST' || restConfig.method === 'PUT' || restConfig.method === 'PATCH'
  ? "\n if (req.body) {\n options.body = ".concat(restConfig.bodyType === 'json' || !restConfig.bodyType
  ? 'JSON.stringify(req.body)'
  : 'req.body', "\n }\n ")
- : '', "\n \n const response = await fetch(url, options)\n \n if (!response.ok) {\n return res.status(response.status).json({\n success: false,\n error: `HTTP ${response.status}: ${response.statusText}`,\n timestamp: Date.now()\n })\n }\n \n let data = await response.json()\n \n // Apply offset and limit if data is an array and parameters are provided\n if (Array.isArray(data)) {\n const offsetValue = offset !== undefined ? parseInt(offset) : 0\n const limitValue = limit !== undefined ? parseInt(limit) : undefined\n \n if (limitValue !== undefined) {\n data = data.slice(offsetValue, offsetValue + limitValue)\n } else if (offsetValue > 0) {\n data = data.slice(offsetValue)\n }\n }\n \n const safeData = JSON.parse(JSON.stringify(data))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('REST API fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
+ : '', "\n \n const response = await fetch(url, options)\n \n if (!response.ok) {\n return res.status(response.status).json({\n success: false,\n error: `HTTP ${response.status}: ${response.statusText}`,\n timestamp: Date.now()\n })\n }\n \n let data = await response.json()\n \n // Apply filtering, sorting, and pagination if data is an array\n if (Array.isArray(data)) {\n // 1. Apply search filter\n if (query && query.trim()) {\n const searchQuery = query.toLowerCase()\n \n if (queryColumns) {\n try {\n const columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n data = data.filter((item) => {\n return columns.some((col) => {\n const value = item[col]\n if (value === null || value === undefined) return false\n return String(value).toLowerCase().includes(searchQuery)\n })\n })\n } catch (err) {\n console.error('Error parsing queryColumns:', err)\n }\n } else {\n // Search across all fields\n data = data.filter((item) => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n }\n }\n \n // 2. Apply custom filters\n if (filters) {\n try {\n const parsedFilters = typeof filters === 'string' ? JSON.parse(filters) : filters\n data = data.filter((item) => {\n return Object.entries(parsedFilters).every(([key, value]) => {\n if (Array.isArray(value)) {\n return value.includes(item[key])\n }\n return item[key] === value\n })\n })\n } catch (err) {\n console.error('Error parsing filters:', err)\n }\n }\n \n // 3. Apply sorting\n if (sortBy && sortBy.trim()) {\n data.sort((a, b) => {\n const aVal = a[sortBy]\n const bVal = b[sortBy]\n const sortOrderValue = sortOrder?.toLowerCase() === 'desc' ? -1 : 1\n if (aVal < bVal) return -sortOrderValue\n if (aVal > bVal) return sortOrderValue\n return 0\n })\n }\n \n // 4. Apply pagination\n const limitValue = limit || perPage\n const pageValue = page ? Math.max(1, parseInt(page)) : undefined\n const offsetValue = offset !== undefined ? Math.max(0, parseInt(offset)) : (pageValue && perPage ? (pageValue - 1) * Math.max(1, parseInt(perPage)) : 0)\n \n if (limitValue) {\n const limitInt = Math.max(1, parseInt(limitValue))\n data = data.slice(offsetValue, offsetValue + limitInt)\n } else if (offsetValue > 0) {\n data = data.slice(offsetValue)\n }\n }\n \n const safeData = JSON.parse(JSON.stringify(data))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('REST API fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n }\n}\n");
  };
  exports.generateRESTAPIFetcher = generateRESTAPIFetcher;
  //# sourceMappingURL=rest-api.js.map
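The regenerated REST API route above no longer reads only `offset` and `limit`: the emitted handler destructures `query`, `queryColumns`, `limit`, `page`, `perPage`, `sortBy`, `sortOrder`, `filters`, and `offset` from `req.query` and, whenever the upstream response is an array, applies search, exact-match filters, sorting, and pagination in memory before responding. The TypeScript sketch below condenses that pipeline for readability; `ListParams` and `applyListParams` are illustrative names, not exports of this package.

```ts
// Illustrative sketch of the in-memory pipeline the generated REST API
// handler now applies to array responses. Only the behaviour mirrors the
// diff above; the names used here are hypothetical.
interface ListParams {
  query?: string
  queryColumns?: string[]
  filters?: Record<string, unknown>
  sortBy?: string
  sortOrder?: 'asc' | 'desc'
  offset?: number
  limit?: number
  page?: number
  perPage?: number
}

function applyListParams(
  rows: Array<Record<string, unknown>>,
  params: ListParams
): Array<Record<string, unknown>> {
  let data = rows

  // 1. Search: restrict to queryColumns when provided, otherwise match the whole row.
  if (params.query && params.query.trim()) {
    const q = params.query.toLowerCase()
    const columns = params.queryColumns
    data = data.filter((item) =>
      columns && columns.length
        ? columns.some(
            (col) => item[col] != null && String(item[col]).toLowerCase().includes(q)
          )
        : JSON.stringify(item).toLowerCase().includes(q)
    )
  }

  // 2. Exact-match filters; an array value behaves like an IN clause.
  if (params.filters) {
    const entries = Object.entries(params.filters)
    data = data.filter((item) =>
      entries.every(([key, value]) =>
        Array.isArray(value) ? value.includes(item[key]) : item[key] === value
      )
    )
  }

  // 3. Single-column sort, ascending unless sortOrder is 'desc'.
  if (params.sortBy && params.sortBy.trim()) {
    const key = params.sortBy
    const dir = params.sortOrder === 'desc' ? -1 : 1
    data = [...data].sort((a, b) => {
      const av = a[key] as string | number
      const bv = b[key] as string | number
      if (av < bv) return -dir
      if (av > bv) return dir
      return 0
    })
  }

  // 4. Pagination: an explicit offset wins, otherwise it is derived from page/perPage.
  const limit = params.limit ?? params.perPage
  const offset =
    params.offset ??
    (params.page && params.perPage ? (Math.max(1, params.page) - 1) * params.perPage : 0)
  if (limit) return data.slice(offset, offset + limit)
  return offset > 0 ? data.slice(offset) : data
}
```

As in the generated code, either `limit` or `perPage` selects the page size, and an explicit `offset` takes precedence over a `page`/`perPage` pair.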
@@ -1 +1 @@
- {"version":3,"file":"rest-api.js","sourceRoot":"","sources":["../../../src/fetchers/rest-api.ts"],"names":[],"mappings":";;;AAAO,IAAM,qBAAqB,GAAG,UACnC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,GAAG,IAAI,OAAO,MAAM,CAAC,GAAG,KAAK,QAAQ,IAAI,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;QAC7E,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,iBAAiB,EAAE,CAAA;KACpD;IAED,IAAI;QACF,IAAM,GAAG,GAAG,IAAI,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QAC/B,IAAI,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE;YAC/C,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,qCAAqC,EAAE,CAAA;SACxE;KACF;IAAC,WAAM;QACN,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAA;KACvD;IAED,IAAI,MAAM,CAAC,MAAM,EAAE;QACjB,IAAM,YAAY,GAAG,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAA;QAC9D,IAAI,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,EAAE;YAC5F,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,qBAAqB,EAAE,CAAA;SACxD;KACF;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AA5BY,QAAA,qBAAqB,yBA4BjC;AAaD,IAAM,gBAAgB,GAAG,UAAC,aAA4B;IACpD,IAAI,CAAC,aAAa,IAAI,aAAa,CAAC,IAAI,KAAK,MAAM,EAAE;QACnD,OAAO,EAAE,CAAA;KACV;IAEO,IAAA,IAAI,GAAkB,aAAa,KAA/B,EAAE,WAAW,GAAK,aAAa,YAAlB,CAAkB;IAE3C,QAAQ,IAAI,EAAE;QACZ,KAAK,SAAS;YACZ,OAAO,6CAAsC,WAAW,CAAC,MAAM,MAAG,CAAA;QACpE,KAAK,cAAc,CAAC;QACpB,KAAK,YAAY;YACf,OAAO,6CAAsC,WAAW,CAAC,KAAK,MAAG,CAAA;QACnE,KAAK,YAAY;YACf,OAAO,6DAAsD,WAAW,CAAC,QAAQ,cAAI,WAAW,CAAC,QAAQ,0BAAuB,CAAA;QAClI,KAAK,QAAQ;YACX,OAAO,6CAAsC,WAAW,CAAC,YAAY,MAAG,CAAA;QAC1E;YACE,OAAO,EAAE,CAAA;KACZ;AACH,CAAC,CAAA;AAUM,IAAM,sBAAsB,GAAG,UAAC,MAA+B;IACpE,IAAM,UAAU,GAAG,MAAuB,CAAA;IAC1C,IAAM,QAAQ,GAAG,gBAAgB,CAAC,UAAU,CAAC,aAAa,IAAI,EAAE,CAAC,CAAA;IAEjE,OAAO,uKAMS,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,kCAC3B,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,MAAM,IAAI,KAAK,CAAC,yCAEzC,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,OAAO,IAAI,EAAE,CAAC,mBACxD,QAAQ,2FAQR,UAAU,CAAC,MAAM,KAAK,MAAM,IAAI,UAAU,CAAC,MAAM,KAAK,KAAK,IAAI,UAAU,CAAC,MAAM,KAAK,OAAO;QAC1F,CAAC,CAAC,sDAGF,UAAU,CAAC,QAAQ,KAAK,MAAM,IAAI,CAAC,UAAU,CAAC,QAAQ;YACpD,CAAC,CAAC,0BAA0B;YAC5B,CAAC,CAAC,UAAU,kBAGjB;QACG,CAAC,CAAC,EAAE,yrCA2CX,CAAA;AACD,CAAC,CAAA;AA5EY,QAAA,sBAAsB,0BA4ElC"}
+ {"version":3,"file":"rest-api.js","sourceRoot":"","sources":["../../../src/fetchers/rest-api.ts"],"names":[],"mappings":";;;AAAO,IAAM,qBAAqB,GAAG,UACnC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,GAAG,IAAI,OAAO,MAAM,CAAC,GAAG,KAAK,QAAQ,IAAI,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;QAC7E,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,iBAAiB,EAAE,CAAA;KACpD;IAED,IAAI;QACF,IAAM,GAAG,GAAG,IAAI,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QAC/B,IAAI,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE;YAC/C,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,qCAAqC,EAAE,CAAA;SACxE;KACF;IAAC,WAAM;QACN,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAA;KACvD;IAED,IAAI,MAAM,CAAC,MAAM,EAAE;QACjB,IAAM,YAAY,GAAG,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAA;QAC9D,IAAI,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,EAAE;YAC5F,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,qBAAqB,EAAE,CAAA;SACxD;KACF;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AA5BY,QAAA,qBAAqB,yBA4BjC;AAaD,IAAM,gBAAgB,GAAG,UAAC,aAA4B;IACpD,IAAI,CAAC,aAAa,IAAI,aAAa,CAAC,IAAI,KAAK,MAAM,EAAE;QACnD,OAAO,EAAE,CAAA;KACV;IAEO,IAAA,IAAI,GAAkB,aAAa,KAA/B,EAAE,WAAW,GAAK,aAAa,YAAlB,CAAkB;IAE3C,QAAQ,IAAI,EAAE;QACZ,KAAK,SAAS;YACZ,OAAO,6CAAsC,WAAW,CAAC,MAAM,MAAG,CAAA;QACpE,KAAK,cAAc,CAAC;QACpB,KAAK,YAAY;YACf,OAAO,6CAAsC,WAAW,CAAC,KAAK,MAAG,CAAA;QACnE,KAAK,YAAY;YACf,OAAO,6DAAsD,WAAW,CAAC,QAAQ,cAAI,WAAW,CAAC,QAAQ,0BAAuB,CAAA;QAClI,KAAK,QAAQ;YACX,OAAO,6CAAsC,WAAW,CAAC,YAAY,MAAG,CAAA;QAC1E;YACE,OAAO,EAAE,CAAA;KACZ;AACH,CAAC,CAAA;AAUM,IAAM,sBAAsB,GAAG,UAAC,MAA+B;IACpE,IAAM,UAAU,GAAG,MAAuB,CAAA;IAC1C,IAAM,QAAQ,GAAG,gBAAgB,CAAC,UAAU,CAAC,aAAa,IAAI,EAAE,CAAC,CAAA;IAEjE,OAAO,uOAMS,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,kCAC3B,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,MAAM,IAAI,KAAK,CAAC,yCAEzC,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,OAAO,IAAI,EAAE,CAAC,mBACxD,QAAQ,2FAQR,UAAU,CAAC,MAAM,KAAK,MAAM,IAAI,UAAU,CAAC,MAAM,KAAK,KAAK,IAAI,UAAU,CAAC,MAAM,KAAK,OAAO;QAC1F,CAAC,CAAC,sDAGF,UAAU,CAAC,QAAQ,KAAK,MAAM,IAAI,CAAC,UAAU,CAAC,QAAQ;YACpD,CAAC,CAAC,0BAA0B;YAC5B,CAAC,CAAC,UAAU,kBAGjB;QACG,CAAC,CAAC,EAAE,q4GAyGX,CAAA;AACD,CAAC,CAAA;AA1IY,QAAA,sBAAsB,0BA0IlC"}
@@ -1 +1 @@
- {"version":3,"file":"turso.d.ts","sourceRoot":"","sources":["../../../src/fetchers/turso.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,mBAAmB,WACtB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAcpC,CAAA;AASD,eAAO,MAAM,oBAAoB,WACvB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAoGF,CAAA"}
+ {"version":3,"file":"turso.d.ts","sourceRoot":"","sources":["../../../src/fetchers/turso.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,mBAAmB,WACtB,OAAO,MAAM,EAAE,OAAO,CAAC,KAC9B;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAcpC,CAAA;AASD,eAAO,MAAM,oBAAoB,WACvB,OAAO,MAAM,EAAE,OAAO,CAAC,aACpB,MAAM,KAChB,MAkIF,CAAA"}
@@ -19,7 +19,7 @@ var generateTursoFetcher = function (config, tableName) {
  var tursoConfig = config;
  var databaseUrl = tursoConfig.databaseUrl;
  var token = tursoConfig.token;
- return "import { createClient } from '@libsql/client'\n\nexport default async function handler(req, res) {\n let client = null\n try {\n client = createClient({\n url: ".concat(JSON.stringify(databaseUrl), ",\n authToken: ").concat((0, utils_1.replaceSecretReference)(token), "\n })\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n const whereClauses = []\n const queryParams = []\n \n if (query && queryColumns) {\n const columns = JSON.parse(queryColumns)\n const searchConditions = columns.map((col) => `${col} LIKE ?`)\n whereClauses.push(`(${searchConditions.join(' OR ')})`)\n columns.forEach(() => {\n queryParams.push(`%${query}%`)\n })\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => '?').join(', ')\n queryParams.push(...value)\n whereClauses.push(`${key} IN (${placeholders})`)\n } else {\n whereClauses.push(`${key} = ?`)\n queryParams.push(value)\n }\n })\n }\n \n if (whereClauses.length > 0) {\n sql += ` WHERE ${whereClauses.join(' AND ')}`\n }\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toUpperCase() === 'DESC' ? 'DESC' : 'ASC'\n sql += ` ORDER BY ${sortBy} ${sortOrderValue}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n if (limitValue) {\n sql += ` LIMIT ?`\n queryParams.push(parseInt(limitValue))\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ?`\n queryParams.push(offsetValue)\n }\n \n const result = await client.execute({\n sql,\n args: queryParams\n })\n \n const data = result.rows.map((row) => {\n const obj = {}\n result.columns.forEach((col, idx) => {\n obj[col] = row[col]\n })\n return obj\n })\n \n const safeData = JSON.parse(JSON.stringify(data))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Turso fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n client.close()\n }\n }\n}\n");
+ return "import { createClient } from '@libsql/client'\n\nexport default async function handler(req, res) {\n let client = null\n try {\n client = createClient({\n url: ".concat(JSON.stringify(databaseUrl), ",\n authToken: ").concat((0, utils_1.replaceSecretReference)(token), "\n })\n \n const { query, queryColumns, limit, page, perPage, sortBy, sortOrder, filters, offset } = req.query\n \n let sql = `SELECT * FROM ").concat(tableName, "`\n const whereClauses = []\n const queryParams = []\n let searchQueryColumns = null\n \n if (query) {\n if (queryColumns) {\n const columns = typeof queryColumns === 'string' ? JSON.parse(queryColumns) : (Array.isArray(queryColumns) ? queryColumns : [queryColumns])\n const searchConditions = columns.map((col) => `${col} LIKE ?`)\n whereClauses.push(`(${searchConditions.join(' OR ')})`)\n columns.forEach(() => {\n queryParams.push(`%${query}%`)\n })\n } else {\n // Store query for post-filtering if columns not specified\n searchQueryColumns = query\n }\n }\n \n if (filters) {\n const parsedFilters = JSON.parse(filters)\n Object.entries(parsedFilters).forEach(([key, value]) => {\n if (Array.isArray(value)) {\n const placeholders = value.map(() => '?').join(', ')\n queryParams.push(...value)\n whereClauses.push(`${key} IN (${placeholders})`)\n } else {\n whereClauses.push(`${key} = ?`)\n queryParams.push(value)\n }\n })\n }\n \n if (whereClauses.length > 0) {\n sql += ` WHERE ${whereClauses.join(' AND ')}`\n }\n \n if (sortBy) {\n const sortOrderValue = sortOrder?.toUpperCase() === 'DESC' ? 'DESC' : 'ASC'\n sql += ` ORDER BY ${sortBy} ${sortOrderValue}`\n }\n \n const limitValue = limit || perPage\n const offsetValue = offset !== undefined ? parseInt(offset) : (page && perPage ? (parseInt(page) - 1) * parseInt(perPage) : undefined)\n \n // Only apply SQL pagination if we're not doing post-filtering\n if (!searchQueryColumns) {\n if (limitValue) {\n sql += ` LIMIT ?`\n queryParams.push(parseInt(limitValue))\n }\n \n if (offsetValue !== undefined) {\n sql += ` OFFSET ?`\n queryParams.push(offsetValue)\n }\n }\n \n const result = await client.execute({\n sql,\n args: queryParams\n })\n \n let data = result.rows.map((row) => {\n const obj = {}\n result.columns.forEach((col, idx) => {\n obj[col] = row[col]\n })\n return obj\n })\n \n // Apply post-filtering for search without queryColumns\n if (searchQueryColumns) {\n const searchQuery = searchQueryColumns.toLowerCase()\n data = data.filter((item) => {\n try {\n const stringified = JSON.stringify(item).toLowerCase()\n return stringified.includes(searchQuery)\n } catch {\n return false\n }\n })\n \n // Apply pagination after filtering\n if (limitValue) {\n const start = offsetValue || 0\n data = data.slice(start, start + parseInt(limitValue))\n } else if (offsetValue) {\n data = data.slice(offsetValue)\n }\n }\n \n const safeData = JSON.parse(JSON.stringify(data))\n \n return res.status(200).json({\n success: true,\n data: safeData,\n timestamp: Date.now()\n })\n } catch (error) {\n console.error('Turso fetch error:', error)\n return res.status(500).json({\n success: false,\n error: error.message || 'Failed to fetch data',\n timestamp: Date.now()\n })\n } finally {\n if (client) {\n client.close()\n }\n }\n}\n");
  };
  exports.generateTursoFetcher = generateTursoFetcher;
  //# sourceMappingURL=turso.js.map
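In the Turso route, the previous template built a `LIKE` clause only when both `query` and `queryColumns` were present, so a search without explicit columns was silently dropped. The regenerated handler keeps the SQL `LIKE` path when columns are given; otherwise it skips SQL `LIMIT`/`OFFSET`, post-filters the fetched rows against their JSON representation, and only then slices to the requested page. The sketch below condenses that branching under an illustrative helper name, `buildTursoQuery`, which is not part of the package.

```ts
// Condensed sketch of the Turso handler's two search paths, assuming the
// table name and parameter shape visible in the diff above.
interface TursoQueryPlan {
  sql: string
  args: Array<string | number>
  postFilterQuery?: string // set when the search must run in JS after the query
}

function buildTursoQuery(
  tableName: string,
  params: {
    query?: string
    queryColumns?: string[]
    sortBy?: string
    sortOrder?: string
    limit?: number
    offset?: number
  }
): TursoQueryPlan {
  let sql = `SELECT * FROM ${tableName}`
  const args: Array<string | number> = []
  let postFilterQuery: string | undefined

  if (params.query) {
    if (params.queryColumns?.length) {
      // Search in SQL: one LIKE per column, OR-ed together.
      sql += ` WHERE (${params.queryColumns.map((c) => `${c} LIKE ?`).join(' OR ')})`
      params.queryColumns.forEach(() => args.push(`%${params.query}%`))
    } else {
      // No columns given: defer the search to a JSON post-filter.
      postFilterQuery = params.query
    }
  }

  if (params.sortBy) {
    const dir = params.sortOrder?.toUpperCase() === 'DESC' ? 'DESC' : 'ASC'
    sql += ` ORDER BY ${params.sortBy} ${dir}`
  }

  // LIMIT/OFFSET only when no post-filter is pending: otherwise all rows are
  // needed so pagination can be applied after filtering in JavaScript.
  if (!postFilterQuery) {
    if (params.limit !== undefined) {
      sql += ' LIMIT ?'
      args.push(params.limit)
    }
    if (params.offset !== undefined) {
      sql += ' OFFSET ?'
      args.push(params.offset)
    }
  }

  return { sql, args, postFilterQuery }
}
```

When `postFilterQuery` is set, the generated handler stringifies each row, matches it case-insensitively, and applies the requested offset and limit to the filtered array, as shown in the `+` line above.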
@@ -1 +1 @@
- {"version":3,"file":"turso.js","sourceRoot":"","sources":["../../../src/fetchers/turso.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAE1C,IAAM,mBAAmB,GAAG,UACjC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,WAAW,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;QACjE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,gCAAgC,EAAE,CAAA;KACnE;IAED,IAAI,CAAC,MAAM,CAAC,KAAK,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,QAAQ,EAAE;QACrD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,wCAAwC,EAAE,CAAA;KAC3E;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAhBY,QAAA,mBAAmB,uBAgB/B;AASM,IAAM,oBAAoB,GAAG,UAClC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAqB,CAAA;IACzC,IAAM,WAAW,GAAG,WAAW,CAAC,WAAW,CAAA;IAC3C,IAAM,KAAK,GAAG,WAAW,CAAC,KAAK,CAAA;IAE/B,OAAO,oLAMI,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,iCACrB,IAAA,8BAAsB,EAAC,KAAK,CAAC,yKAKhB,SAAS,4tEAkFxC,CAAA;AACD,CAAC,CAAA;AAvGY,QAAA,oBAAoB,wBAuGhC"}
+ {"version":3,"file":"turso.js","sourceRoot":"","sources":["../../../src/fetchers/turso.ts"],"names":[],"mappings":";;;AAAA,kCAAiD;AAE1C,IAAM,mBAAmB,GAAG,UACjC,MAA+B;IAE/B,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,+BAA+B,EAAE,CAAA;KAClE;IAED,IAAI,CAAC,MAAM,CAAC,WAAW,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;QACjE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,gCAAgC,EAAE,CAAA;KACnE;IAED,IAAI,CAAC,MAAM,CAAC,KAAK,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,QAAQ,EAAE;QACrD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,wCAAwC,EAAE,CAAA;KAC3E;IAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;AAC1B,CAAC,CAAA;AAhBY,QAAA,mBAAmB,uBAgB/B;AASM,IAAM,oBAAoB,GAAG,UAClC,MAA+B,EAC/B,SAAiB;IAEjB,IAAM,WAAW,GAAG,MAAqB,CAAA;IACzC,IAAM,WAAW,GAAG,WAAW,CAAC,WAAW,CAAA;IAC3C,IAAM,KAAK,GAAG,WAAW,CAAC,KAAK,CAAA;IAE/B,OAAO,oLAMI,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,iCACrB,IAAA,8BAAsB,EAAC,KAAK,CAAC,yKAKhB,SAAS,2wGAgHxC,CAAA;AACD,CAAC,CAAA;AArIY,QAAA,oBAAoB,wBAqIhC"}
@@ -1 +1 @@
- {"version":3,"file":"pagination-plugin.d.ts","sourceRoot":"","sources":["../../src/pagination-plugin.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,eAAe,EACf,sBAAsB,EAGvB,MAAM,4BAA4B,CAAA;AAgBnC,eAAO,MAAM,qCAAqC,EAAE,sBAAsB,CAAC,EAAE,CA+tB5E,CAAA;;AA0iDD,wBAAsD"}
+ {"version":3,"file":"pagination-plugin.d.ts","sourceRoot":"","sources":["../../src/pagination-plugin.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,eAAe,EACf,sBAAsB,EAGvB,MAAM,4BAA4B,CAAA;AAgBnC,eAAO,MAAM,qCAAqC,EAAE,sBAAsB,CAAC,EAAE,CAuvB5E,CAAA;;AAkqDD,wBAAsD"}