ugly-app 0.1.310 → 0.1.312

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50) hide show
  1. package/dist/cli/authCommands.js +1 -1
  2. package/dist/cli/authCommands.js.map +1 -1
  3. package/dist/cli/dev.d.ts.map +1 -1
  4. package/dist/cli/dev.js +0 -25
  5. package/dist/cli/dev.js.map +1 -1
  6. package/dist/cli/migrate.js +1 -1
  7. package/dist/cli/migrate.js.map +1 -1
  8. package/dist/cli/scaffold.d.ts.map +1 -1
  9. package/dist/cli/scaffold.js +4 -1
  10. package/dist/cli/scaffold.js.map +1 -1
  11. package/dist/cli/serverLogQuery.js +1 -1
  12. package/dist/cli/serverLogQuery.js.map +1 -1
  13. package/dist/cli/version.d.ts +1 -1
  14. package/dist/cli/version.js +1 -1
  15. package/dist/server/App.d.ts.map +1 -1
  16. package/dist/server/App.js +1 -5
  17. package/dist/server/App.js.map +1 -1
  18. package/dist/server/DataProxyClient.d.ts +1 -1
  19. package/dist/server/DataProxyClient.d.ts.map +1 -1
  20. package/dist/server/DataProxyClient.js +5 -10
  21. package/dist/server/DataProxyClient.js.map +1 -1
  22. package/dist/server/Logging.d.ts +1 -1
  23. package/dist/server/Logging.d.ts.map +1 -1
  24. package/dist/server/SchemaCheck.d.ts.map +1 -1
  25. package/dist/server/SchemaCheck.js +0 -1
  26. package/dist/server/SchemaCheck.js.map +1 -1
  27. package/dist/server/Socket.d.ts +1 -1
  28. package/dist/server/Socket.d.ts.map +1 -1
  29. package/eslint.config.js +1 -0
  30. package/package.json +1 -3
  31. package/src/cli/authCommands.ts +1 -1
  32. package/src/cli/dev.ts +0 -30
  33. package/src/cli/migrate.ts +1 -1
  34. package/src/cli/scaffold.ts +4 -1
  35. package/src/cli/serverLogQuery.ts +1 -1
  36. package/src/cli/version.ts +1 -1
  37. package/src/server/App.ts +1 -5
  38. package/src/server/DataProxyClient.ts +6 -12
  39. package/src/server/Logging.ts +1 -1
  40. package/src/server/SchemaCheck.ts +0 -1
  41. package/src/server/Socket.ts +1 -1
  42. package/src/server/Postgres.ts +0 -29
  43. package/src/server/PostgresDB.ts +0 -263
  44. package/src/server/PostgresFilter.ts +0 -162
  45. package/src/server/PostgresOperators.ts +0 -136
  46. package/src/server/PostgresPipeline.ts +0 -262
  47. package/src/server/PostgresSchema.ts +0 -51
  48. package/src/server/PostgresSearch.ts +0 -109
  49. package/src/server/Qdrant.ts +0 -110
  50. package/src/server/Storage.ts +0 -135
@@ -1,162 +0,0 @@
1
- /**
2
- * Translates MongoDB-style filter objects into PostgreSQL JSONB WHERE clauses.
3
- *
4
- * Supports: equality, $in, $nin, $gt, $gte, $lt, $lte, $ne, $exists.
5
- * Fields are accessed via JSONB operators (data->>'field' for text, casts for numbers/booleans).
6
- * Dot-notation paths (e.g. 'user.profile.name') are translated to -> chains.
7
- */
8
-
9
/** Result of translating a MongoDB-style filter into SQL fragments. */
export interface TranslatedFilter {
  /** WHERE-clause body (no leading `WHERE`); clauses are AND-joined. Empty string for an empty filter. */
  where: string;
  /** Positional parameter values referenced as $1..$n by `where`. */
  values: unknown[];
}
13
-
14
/** Fields stored as top-level table columns rather than inside the JSONB `data` column. */
const TOP_LEVEL_COLUMNS = new Set(['_id', 'created', 'updated', 'version']);

/** Top-level columns stored as TIMESTAMPTZ. Numeric filter values against these must be converted to Date. */
const TIMESTAMP_COLUMNS = new Set(['created', 'updated']);
19
-
20
- /** Convert a value to a Date if it's a numeric epoch for a timestamp column. */
21
- function coerceTimestamp(field: string, value: unknown): unknown {
22
- if (TIMESTAMP_COLUMNS.has(field) && typeof value === 'number') {
23
- return new Date(value);
24
- }
25
- return value;
26
- }
27
-
28
- /** Build the JSONB accessor for a field path. Last segment uses ->> (text extraction). */
29
- function jsonbPath(field: string, asText = true): string {
30
- if (TOP_LEVEL_COLUMNS.has(field)) return `"${field}"`;
31
- const parts = field.split('.');
32
- if (parts.length === 1) {
33
- return asText ? `data->>'${parts[0]}'` : `data->'${parts[0]}'`;
34
- }
35
- const chain = parts.slice(0, -1).map((p) => `'${p}'`).join('->');
36
- const last = parts[parts.length - 1];
37
- return asText ? `data->${chain}->>'${last}'` : `data->${chain}->'${last}'`;
38
- }
39
-
40
- function isOperatorObject(value: unknown): value is Record<string, unknown> {
41
- if (typeof value !== 'object' || value === null || Array.isArray(value)) return false;
42
- return Object.keys(value).some((k) => k.startsWith('$'));
43
- }
44
-
45
- export function translateFilter(filter: Record<string, unknown>): TranslatedFilter {
46
- const clauses: string[] = [];
47
- const values: unknown[] = [];
48
- let paramIdx = 1;
49
-
50
- for (const [field, value] of Object.entries(filter)) {
51
- if (isOperatorObject(value)) {
52
- for (const [op, operand] of Object.entries(value)) {
53
- switch (op) {
54
- case '$in':
55
- // MongoDB: {field: {$in: [vals]}} matches if field is scalar and equals any val,
56
- // OR if field is an array and contains any val.
57
- // Top-level columns (_id, etc.) are plain TEXT — skip the JSONB overlap check.
58
- if (TOP_LEVEL_COLUMNS.has(field)) {
59
- clauses.push(`${jsonbPath(field)} = ANY($${paramIdx})`);
60
- } else {
61
- clauses.push(`(${jsonbPath(field)} = ANY($${paramIdx}) OR ${jsonbPath(field, false)} ?| $${paramIdx})`);
62
- }
63
- values.push(operand);
64
- paramIdx++;
65
- break;
66
- case '$nin':
67
- // Inverse: scalar not in ANY, AND array doesn't overlap
68
- if (TOP_LEVEL_COLUMNS.has(field)) {
69
- clauses.push(`${jsonbPath(field)} != ALL($${paramIdx})`);
70
- } else {
71
- clauses.push(`(${jsonbPath(field)} != ALL($${paramIdx}) AND NOT (${jsonbPath(field, false)} ?| $${paramIdx}))`);
72
- }
73
- values.push(operand);
74
- paramIdx++;
75
- break;
76
- case '$gt': {
77
- const acc = jsonbPath(field);
78
- const cast = TOP_LEVEL_COLUMNS.has(field) ? acc : `(${acc})::numeric`;
79
- clauses.push(`${cast} > $${paramIdx}`);
80
- values.push(coerceTimestamp(field, operand));
81
- paramIdx++;
82
- break;
83
- }
84
- case '$gte': {
85
- const acc = jsonbPath(field);
86
- const cast = TOP_LEVEL_COLUMNS.has(field) ? acc : `(${acc})::numeric`;
87
- clauses.push(`${cast} >= $${paramIdx}`);
88
- values.push(coerceTimestamp(field, operand));
89
- paramIdx++;
90
- break;
91
- }
92
- case '$lt': {
93
- const acc = jsonbPath(field);
94
- const cast = TOP_LEVEL_COLUMNS.has(field) ? acc : `(${acc})::numeric`;
95
- clauses.push(`${cast} < $${paramIdx}`);
96
- values.push(coerceTimestamp(field, operand));
97
- paramIdx++;
98
- break;
99
- }
100
- case '$lte': {
101
- const acc = jsonbPath(field);
102
- const cast = TOP_LEVEL_COLUMNS.has(field) ? acc : `(${acc})::numeric`;
103
- clauses.push(`${cast} <= $${paramIdx}`);
104
- values.push(coerceTimestamp(field, operand));
105
- paramIdx++;
106
- break;
107
- }
108
- case '$ne':
109
- clauses.push(`${jsonbPath(field)} != $${paramIdx}`);
110
- values.push(coerceTimestamp(field, operand));
111
- paramIdx++;
112
- break;
113
- case '$exists':
114
- if (operand) {
115
- clauses.push(`data ? '${field}'`);
116
- } else {
117
- clauses.push(`NOT (data ? '${field}')`);
118
- }
119
- break;
120
- default:
121
- throw new Error(`[DB] Unsupported filter operator: ${op}`);
122
- }
123
- }
124
- } else if (value === null) {
125
- clauses.push(`${jsonbPath(field, false)} IS NULL OR ${jsonbPath(field)} = 'null'`);
126
- } else if (typeof value === 'number') {
127
- if (TIMESTAMP_COLUMNS.has(field)) {
128
- clauses.push(`${jsonbPath(field)} = $${paramIdx}`);
129
- values.push(new Date(value));
130
- } else {
131
- clauses.push(`(${jsonbPath(field)})::numeric = $${paramIdx}`);
132
- values.push(value);
133
- }
134
- paramIdx++;
135
- } else if (typeof value === 'boolean') {
136
- clauses.push(`(${jsonbPath(field)})::boolean = $${paramIdx}`);
137
- values.push(value);
138
- paramIdx++;
139
- } else {
140
- // String or other — use text equality
141
- const accessor = field === '_id' ? '_id' : jsonbPath(field);
142
- clauses.push(`${accessor} = $${paramIdx}`);
143
- values.push(value);
144
- paramIdx++;
145
- }
146
- }
147
-
148
- return {
149
- where: clauses.join(' AND '),
150
- values,
151
- };
152
- }
153
-
154
- /** Translate MongoDB sort to ORDER BY clause. */
155
- export function translateSort(sort: Record<string, 1 | -1>): string {
156
- return Object.entries(sort)
157
- .map(([field, dir]) => {
158
- const accessor = TOP_LEVEL_COLUMNS.has(field) ? `"${field}"` : jsonbPath(field);
159
- return `${accessor} ${dir === 1 ? 'ASC' : 'DESC'}`;
160
- })
161
- .join(', ');
162
- }
@@ -1,136 +0,0 @@
1
- import { query } from './Postgres.js';
2
-
3
- /**
4
- * Translates MongoDB update operators ($set, $unset, $inc, $addToSet, $pull)
5
- * into a single PostgreSQL UPDATE statement using JSONB functions.
6
- *
7
- * Returns the updated document's data, or null if the document doesn't exist.
8
- */
9
-
10
/** Subset of MongoDB update operators accepted by applyUpdateOp. */
interface UpdateOp {
  /** Field path → new value (JSON-serialized into the document). */
  $set?: Record<string, unknown>;
  /** Field path → value is ignored; the key is removed. */
  $unset?: Record<string, string>;
  /** Field path → numeric delta; missing/null fields start at 0. */
  $inc?: Record<string, number>;
  /** Field path → element appended unless the array already contains it. */
  $addToSet?: Record<string, unknown>;
  /** Field path → element removed from the array wherever it matches. */
  $pull?: Record<string, unknown>;
}
17
-
18
- /** Convert a dot-notation path to a PostgreSQL path array: 'a.b.c' → '{a,b,c}' */
19
- function toPathArray(field: string): string {
20
- return `{${field.split('.').join(',')}}`;
21
- }
22
-
23
/**
 * Build one nested SQL expression that applies every update operator to the
 * `data` JSONB column. Each operator wraps the previous expression, so the
 * whole update evaluates in a single UPDATE statement.
 *
 * Parameter numbering starts at $2 because $1 is reserved for the document
 * _id by the caller.
 *
 * NOTE(review): each `${expr}` interpolation duplicates the accumulated
 * expression text (the COALESCE/CASE forms reference it two or three times),
 * so SQL size can grow exponentially with the number of operator fields —
 * confirm callers only send small update documents.
 */
function buildUpdateExpression(op: UpdateOp): { expr: string; values: unknown[]; paramStart: number } {
  let expr = 'data';
  const values: unknown[] = [];
  let paramIdx = 2; // $1 is always the _id

  // $set: jsonb_set for each field.
  // For nested paths (e.g. 'buttons.botId'), ensure intermediate objects exist:
  // PostgreSQL's jsonb_set only creates the leaf key, not intermediate parents,
  // and if the parent is null or missing it returns NULL, violating NOT NULL.
  if (op.$set) {
    for (const [field, value] of Object.entries(op.$set)) {
      const parts = field.split('.');
      if (parts.length > 1) {
        // Materialize each intermediate path as an (possibly empty) object.
        for (let i = 1; i < parts.length; i++) {
          const parentPath = toPathArray(parts.slice(0, i).join('.'));
          expr = `jsonb_set(${expr}, '${parentPath}', COALESCE(${expr}#>'${parentPath}', '{}'::jsonb))`;
        }
      }
      expr = `jsonb_set(${expr}, '${toPathArray(field)}', $${paramIdx}::jsonb)`;
      values.push(JSON.stringify(value));
      paramIdx++;
    }
  }

  // $unset: remove keys (top-level via the `-` operator, nested via jsonb_set
  // on the parent).
  if (op.$unset) {
    for (const field of Object.keys(op.$unset)) {
      const parts = field.split('.');
      if (parts.length === 1) {
        expr = `(${expr}) - '${field}'`;
      } else {
        // Remove the last key from the parent path. COALESCE guards against
        // NULL when the parent path doesn't exist, which would cause
        // jsonb_set to return NULL and violate NOT NULL.
        const parentPath = toPathArray(parts.slice(0, -1).join('.'));
        const lastKey = parts[parts.length - 1];
        expr = `jsonb_set(${expr}, '${parentPath}', COALESCE((${expr}#>'${parentPath}') - '${lastKey}', '{}'::jsonb))`;
      }
    }
  }

  // $inc: increment numeric values; missing/non-numeric fields start at 0.
  if (op.$inc) {
    for (const [field, amount] of Object.entries(op.$inc)) {
      const path = toPathArray(field);
      expr = `jsonb_set(${expr}, '${path}', to_jsonb(COALESCE((${expr}#>>'{${field.split('.').join(',')}}')::numeric, 0) + $${paramIdx}))`;
      values.push(amount);
      paramIdx++;
    }
  }

  // $addToSet: append only when the array doesn't already contain (@>) the value.
  if (op.$addToSet) {
    for (const [field, value] of Object.entries(op.$addToSet)) {
      const path = toPathArray(field);
      const jsonVal = `$${paramIdx}::jsonb`;
      expr = `jsonb_set(${expr}, '${path}', CASE WHEN (${expr}#>'${path}') @> ${jsonVal} THEN (${expr}#>'${path}') ELSE (${expr}#>'${path}') || ${jsonVal} END)`;
      values.push(JSON.stringify(value));
      paramIdx++;
    }
  }

  // $pull: rebuild the array keeping only elements != value.
  if (op.$pull) {
    for (const [field, value] of Object.entries(op.$pull)) {
      const path = toPathArray(field);
      const jsonVal = `$${paramIdx}::jsonb`;
      expr = `jsonb_set(${expr}, '${path}', (SELECT COALESCE(jsonb_agg(elem), '[]'::jsonb) FROM jsonb_array_elements(${expr}#>'${path}') elem WHERE elem != ${jsonVal}))`;
      values.push(JSON.stringify(value));
      paramIdx++;
    }
  }

  return { expr, values, paramStart: paramIdx };
}
103
-
104
- export async function applyUpdateOp(
105
- collection: string,
106
- id: string,
107
- op: UpdateOp,
108
- ): Promise<Record<string, unknown> | null> {
109
- const { expr, values } = buildUpdateExpression(op);
110
-
111
- const sql = `
112
- UPDATE "${collection}"
113
- SET data = ${expr}, updated = now(), version = version + 1
114
- WHERE _id = $1
115
- RETURNING data, _id, created, updated, version
116
- `;
117
-
118
- const result = await query<{
119
- data: Record<string, unknown>;
120
- _id: string;
121
- created: Date;
122
- updated: Date;
123
- version: number;
124
- }>(sql, [id, ...values]);
125
-
126
- if (result.rows.length === 0) return null;
127
-
128
- const row = result.rows[0];
129
- return {
130
- ...row.data,
131
- _id: row._id,
132
- created: row.created,
133
- updated: row.updated,
134
- version: row.version,
135
- };
136
- }
@@ -1,262 +0,0 @@
1
- /**
2
- * Translates a subset of MongoDB aggregation pipeline stages into PostgreSQL SQL.
3
- *
4
- * Supported stages: $match, $sort, $limit, $skip, $count, $group
5
- * Supported $group accumulators: $sum, $avg, $min, $max, $first, $last
6
- */
7
-
8
- import { translateFilter } from './PostgresFilter.js';
9
-
10
/** Mutable accumulator for translating pipeline stages into one SELECT statement. */
interface QueryParts {
  /** SELECT list (columns or aggregate expressions). */
  select: string;
  /** FROM target: the base table or a wrapped subquery. */
  from: string;
  /** WHERE clauses, joined with AND. */
  where: string[];
  /** GROUP BY expressions (set by $group). */
  groupBy: string[];
  /** ORDER BY terms (set by $sort). */
  orderBy: string[];
  /** LIMIT value, or null when unset. */
  limit: number | null;
  /** OFFSET value, or null when unset. */
  offset: number | null;
  /** Positional parameter values for the $1..$n placeholders. */
  values: unknown[];
  /** Next free 1-based parameter index. */
  paramIdx: number;
  /** True after a $group stage has been applied */
  grouped: boolean;
}
23
-
24
/** Final SQL produced from an aggregation pipeline, with its parameter values. */
export interface TranslatedPipeline {
  /** Complete SELECT statement. */
  sql: string;
  /** Values for the $1..$n placeholders in `sql`. */
  values: unknown[];
}
28
-
29
- /** Build JSONB accessor for a field in the base table */
30
- function jsonbPath(field: string, asText = true): string {
31
- if (field === '_id') return '"_id"';
32
- const parts = field.split('.');
33
- if (parts.length === 1) {
34
- return asText ? `data->>'${parts[0]}'` : `data->'${parts[0]}'`;
35
- }
36
- const chain = parts.slice(0, -1).map((p) => `'${p}'`).join('->');
37
- const last = parts[parts.length - 1];
38
- return asText ? `data->${chain}->>'${last}'` : `data->${chain}->'${last}'`;
39
- }
40
-
41
- /** Wrap current query parts into a subquery, resetting for further stages */
42
- function wrapAsSubquery(parts: QueryParts): QueryParts {
43
- const innerSql = buildSql(parts);
44
- return {
45
- select: '*',
46
- from: `(${innerSql}) AS _sub${parts.paramIdx}`,
47
- where: [],
48
- groupBy: [],
49
- orderBy: [],
50
- limit: null,
51
- offset: null,
52
- values: [...parts.values],
53
- paramIdx: parts.paramIdx,
54
- grouped: false,
55
- };
56
- }
57
-
58
- function buildSql(parts: QueryParts): string {
59
- let sql = `SELECT ${parts.select} FROM ${parts.from}`;
60
- if (parts.where.length > 0) {
61
- sql += ` WHERE ${parts.where.join(' AND ')}`;
62
- }
63
- if (parts.groupBy.length > 0) {
64
- sql += ` GROUP BY ${parts.groupBy.join(', ')}`;
65
- }
66
- if (parts.orderBy.length > 0) {
67
- sql += ` ORDER BY ${parts.orderBy.join(', ')}`;
68
- }
69
- if (parts.limit !== null) {
70
- sql += ` LIMIT ${parts.limit}`;
71
- }
72
- if (parts.offset !== null) {
73
- sql += ` OFFSET ${parts.offset}`;
74
- }
75
- return sql;
76
- }
77
-
78
/**
 * Apply a $match stage: translate the Mongo filter and AND it onto the query.
 * After a $group the filter must run against the grouped output, so the query
 * is first frozen into a subquery.
 */
function applyMatch(parts: QueryParts, match: Record<string, unknown>): void {
  if (parts.grouped) {
    // After a $group, wrap as subquery and match against aliased columns
    const wrapped = wrapAsSubquery(parts);
    Object.assign(parts, wrapped);
  }

  const { where, values } = translateFilter(match);
  if (!where) return;

  // translateFilter numbers placeholders from $1; shift them to continue this
  // pipeline's running parameter index.
  let rewritten = where;
  // Replace from highest to lowest to avoid $1 matching $10, $11, etc.
  for (let i = values.length; i >= 1; i--) {
    const newIdx = parts.paramIdx + i - 1;
    rewritten = rewritten.replace(new RegExp(`\\$${i}(?!\\d)`, 'g'), `$${newIdx}`);
  }

  parts.where.push(rewritten);
  parts.values.push(...values);
  parts.paramIdx += values.length;
}
100
-
101
- function applySort(parts: QueryParts, sort: Record<string, number>): void {
102
- if (parts.grouped) {
103
- // After $group, sort uses aliased column names directly
104
- const wrapped = wrapAsSubquery(parts);
105
- Object.assign(parts, wrapped);
106
- }
107
-
108
- parts.orderBy = [];
109
- for (const [field, dir] of Object.entries(sort)) {
110
- if (parts.from.includes('_sub')) {
111
- // Sorting on a subquery — use column names directly
112
- parts.orderBy.push(`"${field}" ${dir === 1 ? 'ASC' : 'DESC'}`);
113
- } else {
114
- const accessor = field === '_id' ? '"_id"' : jsonbPath(field);
115
- parts.orderBy.push(`${accessor} ${dir === 1 ? 'ASC' : 'DESC'}`);
116
- }
117
- }
118
- }
119
-
120
- function applyLimit(parts: QueryParts, limit: number): void {
121
- parts.limit = limit;
122
- }
123
-
124
- function applySkip(parts: QueryParts, skip: number): void {
125
- parts.offset = skip;
126
- }
127
-
128
- function applyCount(parts: QueryParts, alias: string): void {
129
- const innerSql = buildSql(parts);
130
- parts.select = `COUNT(*) AS "${alias}"`;
131
- parts.from = `(${innerSql}) AS _counted`;
132
- parts.where = [];
133
- parts.groupBy = [];
134
- parts.orderBy = [];
135
- parts.limit = null;
136
- parts.offset = null;
137
- parts.grouped = false;
138
- }
139
-
140
- function resolveAccumulator(acc: Record<string, unknown>): string {
141
- const [[op, field]] = Object.entries(acc);
142
- const col = typeof field === 'string' && field.startsWith('$')
143
- ? jsonbPath(field.slice(1))
144
- : String(field);
145
-
146
- switch (op) {
147
- case '$sum':
148
- if (typeof field === 'number') return String(field);
149
- return `SUM((${col})::double precision)`;
150
- case '$avg':
151
- return `AVG((${col})::double precision)`;
152
- case '$min':
153
- return `MIN((${col})::double precision)`;
154
- case '$max':
155
- return `MAX((${col})::double precision)`;
156
- case '$first':
157
- return `(array_agg(${col}))[1]`;
158
- case '$last':
159
- return `(array_agg(${col}))[array_length(array_agg(${col}), 1)]`;
160
- default:
161
- throw new Error(`[DB] Unsupported accumulator: ${op}`);
162
- }
163
- }
164
-
165
- function applyGroup(parts: QueryParts, group: Record<string, unknown>): void {
166
- const groupId = group._id;
167
- const selectParts: string[] = [];
168
- const groupByParts: string[] = [];
169
-
170
- // _id field — the group key
171
- if (groupId === null) {
172
- selectParts.push(`NULL AS "_id"`);
173
- } else if (typeof groupId === 'string' && groupId.startsWith('$')) {
174
- const field = groupId.slice(1);
175
- const accessor = field === '_id' ? '"_id"' : jsonbPath(field);
176
- selectParts.push(`${accessor} AS "_id"`);
177
- groupByParts.push(accessor);
178
- } else {
179
- throw new Error(`[DB] Unsupported $group _id expression: ${JSON.stringify(groupId)}`);
180
- }
181
-
182
- // Accumulators
183
- for (const [alias, expr] of Object.entries(group)) {
184
- if (alias === '_id') continue;
185
- const resolved = resolveAccumulator(expr as Record<string, unknown>);
186
- selectParts.push(`${resolved} AS "${alias}"`);
187
- }
188
-
189
- parts.select = selectParts.join(', ');
190
- parts.groupBy = groupByParts;
191
- parts.grouped = true;
192
- }
193
-
194
- export function translatePipeline(
195
- collection: string,
196
- pipeline: Record<string, unknown>[],
197
- options?: { skip?: number; limit?: number },
198
- ): TranslatedPipeline {
199
- const parts: QueryParts = {
200
- select: '_id, data, created, updated, version',
201
- from: `"${collection}"`,
202
- where: [],
203
- groupBy: [],
204
- orderBy: [],
205
- limit: null,
206
- offset: null,
207
- values: [],
208
- paramIdx: 1,
209
- grouped: false,
210
- };
211
-
212
- for (const stage of pipeline) {
213
- const [[key, value]] = Object.entries(stage);
214
- switch (key) {
215
- case '$match':
216
- applyMatch(parts, value as Record<string, unknown>);
217
- break;
218
- case '$sort':
219
- applySort(parts, value as Record<string, number>);
220
- break;
221
- case '$limit':
222
- applyLimit(parts, value as number);
223
- break;
224
- case '$skip':
225
- applySkip(parts, value as number);
226
- break;
227
- case '$count':
228
- applyCount(parts, value as string);
229
- break;
230
- case '$group':
231
- applyGroup(parts, value as Record<string, unknown>);
232
- break;
233
- // Stages that are sanitized by StoreHandlers but not translatable to SQL;
234
- // treat as no-ops so queries resolve without error.
235
- case '$addFields':
236
- case '$set':
237
- case '$project':
238
- case '$unwind':
239
- case '$sample':
240
- case '$replaceRoot':
241
- case '$replaceWith':
242
- case '$bucket':
243
- case '$bucketAuto':
244
- break;
245
- default:
246
- throw new Error(`[DB] Unsupported pipeline stage: ${key}`);
247
- }
248
- }
249
-
250
- // Apply options (limit/skip) after pipeline stages
251
- if (options?.skip !== undefined) {
252
- parts.offset = options.skip;
253
- }
254
- if (options?.limit !== undefined) {
255
- parts.limit = options.limit;
256
- }
257
-
258
- return {
259
- sql: buildSql(parts),
260
- values: parts.values,
261
- };
262
- }
@@ -1,51 +0,0 @@
1
- import { query } from './Postgres.js';
2
-
3
- export async function ensureTable(collection: string): Promise<void> {
4
- // Validate collection name (alphanumeric + underscore only)
5
- if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(collection)) {
6
- throw new Error(`[DB] Invalid collection name: ${collection}`);
7
- }
8
-
9
- await query(`
10
- CREATE TABLE IF NOT EXISTS "${collection}" (
11
- _id TEXT PRIMARY KEY,
12
- data JSONB NOT NULL,
13
- created TIMESTAMPTZ NOT NULL DEFAULT now(),
14
- updated TIMESTAMPTZ NOT NULL DEFAULT now(),
15
- version INTEGER NOT NULL DEFAULT 1
16
- )
17
- `);
18
-
19
- // GIN index for JSONB queries
20
- await query(`
21
- CREATE INDEX IF NOT EXISTS "idx_${collection}_data"
22
- ON "${collection}" USING GIN (data)
23
- `);
24
- }
25
-
26
- export async function ensureSearchColumn(
27
- collection: string,
28
- fields: string[],
29
- _language = 'english',
30
- ): Promise<void> {
31
- await query(`
32
- ALTER TABLE "${collection}"
33
- ADD COLUMN IF NOT EXISTS search TSVECTOR
34
- `);
35
-
36
- await query(`
37
- CREATE INDEX IF NOT EXISTS "idx_${collection}_search"
38
- ON "${collection}" USING GIN (search)
39
- `);
40
- }
41
-
42
- export async function tableExists(collection: string): Promise<boolean> {
43
- const result = await query<{ exists: boolean }>(
44
- `SELECT EXISTS (
45
- SELECT FROM information_schema.tables
46
- WHERE table_name = $1
47
- ) as exists`,
48
- [collection],
49
- );
50
- return result.rows[0].exists;
51
- }