ugly-app 0.1.310 → 0.1.312
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/authCommands.js +1 -1
- package/dist/cli/authCommands.js.map +1 -1
- package/dist/cli/dev.d.ts.map +1 -1
- package/dist/cli/dev.js +0 -25
- package/dist/cli/dev.js.map +1 -1
- package/dist/cli/migrate.js +1 -1
- package/dist/cli/migrate.js.map +1 -1
- package/dist/cli/scaffold.d.ts.map +1 -1
- package/dist/cli/scaffold.js +4 -1
- package/dist/cli/scaffold.js.map +1 -1
- package/dist/cli/serverLogQuery.js +1 -1
- package/dist/cli/serverLogQuery.js.map +1 -1
- package/dist/cli/version.d.ts +1 -1
- package/dist/cli/version.js +1 -1
- package/dist/server/App.d.ts.map +1 -1
- package/dist/server/App.js +1 -5
- package/dist/server/App.js.map +1 -1
- package/dist/server/DataProxyClient.d.ts +1 -1
- package/dist/server/DataProxyClient.d.ts.map +1 -1
- package/dist/server/DataProxyClient.js +5 -10
- package/dist/server/DataProxyClient.js.map +1 -1
- package/dist/server/Logging.d.ts +1 -1
- package/dist/server/Logging.d.ts.map +1 -1
- package/dist/server/SchemaCheck.d.ts.map +1 -1
- package/dist/server/SchemaCheck.js +0 -1
- package/dist/server/SchemaCheck.js.map +1 -1
- package/dist/server/Socket.d.ts +1 -1
- package/dist/server/Socket.d.ts.map +1 -1
- package/eslint.config.js +1 -0
- package/package.json +1 -3
- package/src/cli/authCommands.ts +1 -1
- package/src/cli/dev.ts +0 -30
- package/src/cli/migrate.ts +1 -1
- package/src/cli/scaffold.ts +4 -1
- package/src/cli/serverLogQuery.ts +1 -1
- package/src/cli/version.ts +1 -1
- package/src/server/App.ts +1 -5
- package/src/server/DataProxyClient.ts +6 -12
- package/src/server/Logging.ts +1 -1
- package/src/server/SchemaCheck.ts +0 -1
- package/src/server/Socket.ts +1 -1
- package/src/server/Postgres.ts +0 -29
- package/src/server/PostgresDB.ts +0 -263
- package/src/server/PostgresFilter.ts +0 -162
- package/src/server/PostgresOperators.ts +0 -136
- package/src/server/PostgresPipeline.ts +0 -262
- package/src/server/PostgresSchema.ts +0 -51
- package/src/server/PostgresSearch.ts +0 -109
- package/src/server/Qdrant.ts +0 -110
- package/src/server/Storage.ts +0 -135
|
@@ -1,109 +0,0 @@
|
|
|
1
|
-
import type { DBObject } from '../shared/DB.js';
|
|
2
|
-
import { query } from './Postgres.js';
|
|
3
|
-
import { translateFilter } from './PostgresFilter.js';
|
|
4
|
-
|
|
5
|
-
export async function ensureSearchColumn(
|
|
6
|
-
collection: string,
|
|
7
|
-
fields: string[],
|
|
8
|
-
_language = 'english',
|
|
9
|
-
): Promise<void> {
|
|
10
|
-
await query(`
|
|
11
|
-
ALTER TABLE "${collection}"
|
|
12
|
-
ADD COLUMN IF NOT EXISTS search TSVECTOR
|
|
13
|
-
`);
|
|
14
|
-
|
|
15
|
-
await query(`
|
|
16
|
-
CREATE INDEX IF NOT EXISTS "idx_${collection}_search"
|
|
17
|
-
ON "${collection}" USING GIN (search)
|
|
18
|
-
`);
|
|
19
|
-
}
|
|
20
|
-
|
|
21
|
-
/**
 * Raw row shape returned by full SELECTs against a collection table:
 * the JSONB document payload plus the bookkeeping columns.
 */
interface FullRow {
  _id: string;
  data: Record<string, unknown>;
  created: Date;
  updated: Date;
  version: number;
}
|
|
22
|
-
|
|
23
|
-
function toDoc<T extends DBObject>(row: FullRow): T {
|
|
24
|
-
return {
|
|
25
|
-
...row.data,
|
|
26
|
-
_id: row._id,
|
|
27
|
-
created: row.created,
|
|
28
|
-
updated: row.updated,
|
|
29
|
-
version: row.version,
|
|
30
|
-
} as unknown as T;
|
|
31
|
-
}
|
|
32
|
-
|
|
33
|
-
export async function updateSearchColumn(
|
|
34
|
-
collection: string,
|
|
35
|
-
id: string,
|
|
36
|
-
fields: string[],
|
|
37
|
-
language = 'english',
|
|
38
|
-
): Promise<void> {
|
|
39
|
-
const fieldExprs = fields.map((f) => `COALESCE(data->>'${f}', '')`);
|
|
40
|
-
const concat = fieldExprs.join(` || ' ' || `);
|
|
41
|
-
|
|
42
|
-
await query(
|
|
43
|
-
`UPDATE "${collection}"
|
|
44
|
-
SET search = to_tsvector($1::regconfig, ${concat})
|
|
45
|
-
WHERE _id = $2`,
|
|
46
|
-
[language, id],
|
|
47
|
-
);
|
|
48
|
-
}
|
|
49
|
-
|
|
50
|
-
export async function pgSearchDocs<T extends DBObject>(
|
|
51
|
-
collection: string,
|
|
52
|
-
searchQuery: string,
|
|
53
|
-
options?: { limit?: number; filter?: Record<string, unknown> },
|
|
54
|
-
): Promise<T[]> {
|
|
55
|
-
// Build an OR-based tsquery so that documents matching any term are returned,
|
|
56
|
-
// while ts_rank still rewards documents that match more terms.
|
|
57
|
-
const terms = searchQuery
|
|
58
|
-
.trim()
|
|
59
|
-
.split(/\s+/)
|
|
60
|
-
.filter(Boolean)
|
|
61
|
-
.map((t) => t.replace(/[^a-zA-Z0-9]/g, ''))
|
|
62
|
-
.filter(Boolean);
|
|
63
|
-
|
|
64
|
-
const tsqueryExpr =
|
|
65
|
-
terms.length > 0
|
|
66
|
-
? terms.map((_, i) => `plainto_tsquery('english', $${i + 1})`).join(' || ')
|
|
67
|
-
: `plainto_tsquery('english', $1)`;
|
|
68
|
-
|
|
69
|
-
const tsrankExpr =
|
|
70
|
-
terms.length > 0
|
|
71
|
-
? terms.map((_, i) => `plainto_tsquery('english', $${i + 1})`).join(' || ')
|
|
72
|
-
: `plainto_tsquery('english', $1)`;
|
|
73
|
-
|
|
74
|
-
const values: unknown[] = terms.length > 0 ? [...terms] : [searchQuery];
|
|
75
|
-
let paramIdx = values.length + 1;
|
|
76
|
-
|
|
77
|
-
let sql = `
|
|
78
|
-
SELECT _id, data, created, updated, version,
|
|
79
|
-
ts_rank(search, ${tsrankExpr}) AS rank
|
|
80
|
-
FROM "${collection}"
|
|
81
|
-
WHERE search @@ (${tsqueryExpr})
|
|
82
|
-
`;
|
|
83
|
-
|
|
84
|
-
if (options?.filter) {
|
|
85
|
-
const { where, values: filterValues } = translateFilter(options.filter);
|
|
86
|
-
if (where) {
|
|
87
|
-
let rewritten = where;
|
|
88
|
-
for (let i = filterValues.length; i >= 1; i--) {
|
|
89
|
-
rewritten = rewritten.replace(
|
|
90
|
-
new RegExp(`\\$${i}(?!\\d)`, 'g'),
|
|
91
|
-
`$${paramIdx + i - 1}`,
|
|
92
|
-
);
|
|
93
|
-
}
|
|
94
|
-
sql += ` AND ${rewritten}`;
|
|
95
|
-
values.push(...filterValues);
|
|
96
|
-
paramIdx += filterValues.length;
|
|
97
|
-
}
|
|
98
|
-
}
|
|
99
|
-
|
|
100
|
-
sql += ` ORDER BY rank DESC`;
|
|
101
|
-
|
|
102
|
-
if (options?.limit) {
|
|
103
|
-
sql += ` LIMIT $${paramIdx}`;
|
|
104
|
-
values.push(options.limit);
|
|
105
|
-
}
|
|
106
|
-
|
|
107
|
-
const result = await query<FullRow>(sql, values);
|
|
108
|
-
return result.rows.map((row) => toDoc<T>(row));
|
|
109
|
-
}
|
package/src/server/Qdrant.ts
DELETED
|
@@ -1,110 +0,0 @@
|
|
|
1
|
-
import { createHash } from 'crypto';
|
|
2
|
-
import { QdrantClient } from '@qdrant/js-client-rest';
|
|
3
|
-
|
|
4
|
-
// Lazily-initialized shared Qdrant client; null until initQdrant() is called.
let _client: QdrantClient | null = null;

// Optional prefix applied to every collection name (see setQdrantPrefix);
// empty string means no prefix.
let _collectionPrefix = '';
|
|
7
|
-
|
|
8
|
-
export function setQdrantPrefix(prefix: string): void {
|
|
9
|
-
_collectionPrefix = prefix ? `${prefix}_` : '';
|
|
10
|
-
}
|
|
11
|
-
|
|
12
|
-
function prefixed(name: string): string {
|
|
13
|
-
return `${_collectionPrefix}${name}`;
|
|
14
|
-
}
|
|
15
|
-
|
|
16
|
-
/** Convert an arbitrary string ID to a deterministic UUID (v5-like SHA-1 based). */
|
|
17
|
-
function toUUID(id: string): string {
|
|
18
|
-
const hash = createHash('sha1').update(id).digest('hex');
|
|
19
|
-
return [
|
|
20
|
-
hash.slice(0, 8),
|
|
21
|
-
hash.slice(8, 12),
|
|
22
|
-
'5' + hash.slice(13, 16),
|
|
23
|
-
((parseInt(hash.slice(16, 18), 16) & 0x3f) | 0x80).toString(16).padStart(2, '0') + hash.slice(18, 20),
|
|
24
|
-
hash.slice(20, 32),
|
|
25
|
-
].join('-');
|
|
26
|
-
}
|
|
27
|
-
|
|
28
|
-
/** Creates the shared module-level Qdrant client pointing at the given URL. */
export function initQdrant(url: string): void {
  _client = new QdrantClient({ url });
}
|
|
31
|
-
|
|
32
|
-
export function getQdrantClient(): QdrantClient {
|
|
33
|
-
if (!_client) throw new Error('[Qdrant] Not initialized — call initQdrant first');
|
|
34
|
-
return _client;
|
|
35
|
-
}
|
|
36
|
-
|
|
37
|
-
/** Drops the shared client reference; using Qdrant again requires initQdrant(). */
export function stopQdrant(): void {
  _client = null;
}
|
|
40
|
-
|
|
41
|
-
export async function ensureQdrantCollection(
|
|
42
|
-
name: string,
|
|
43
|
-
dimensions: number,
|
|
44
|
-
): Promise<void> {
|
|
45
|
-
const client = getQdrantClient();
|
|
46
|
-
const collections = await client.getCollections();
|
|
47
|
-
const exists = collections.collections.some((c) => c.name === prefixed(name));
|
|
48
|
-
if (exists) return;
|
|
49
|
-
|
|
50
|
-
await client.createCollection(prefixed(name), {
|
|
51
|
-
vectors: {
|
|
52
|
-
size: dimensions,
|
|
53
|
-
distance: 'Cosine',
|
|
54
|
-
on_disk: true,
|
|
55
|
-
},
|
|
56
|
-
});
|
|
57
|
-
}
|
|
58
|
-
|
|
59
|
-
export async function deleteQdrantCollection(name: string): Promise<void> {
|
|
60
|
-
const client = getQdrantClient();
|
|
61
|
-
try {
|
|
62
|
-
await client.deleteCollection(prefixed(name));
|
|
63
|
-
} catch {
|
|
64
|
-
// Collection may not exist
|
|
65
|
-
}
|
|
66
|
-
}
|
|
67
|
-
|
|
68
|
-
export async function upsertVector(
|
|
69
|
-
collection: string,
|
|
70
|
-
id: string,
|
|
71
|
-
vector: number[],
|
|
72
|
-
payload: Record<string, unknown> = {},
|
|
73
|
-
): Promise<void> {
|
|
74
|
-
const client = getQdrantClient();
|
|
75
|
-
await client.upsert(prefixed(collection), {
|
|
76
|
-
points: [{ id: toUUID(id), vector, payload: { ...payload, _id: id } }],
|
|
77
|
-
});
|
|
78
|
-
}
|
|
79
|
-
|
|
80
|
-
export async function deleteVector(
|
|
81
|
-
collection: string,
|
|
82
|
-
id: string,
|
|
83
|
-
): Promise<void> {
|
|
84
|
-
const client = getQdrantClient();
|
|
85
|
-
await client.delete(prefixed(collection), {
|
|
86
|
-
points: [toUUID(id)],
|
|
87
|
-
});
|
|
88
|
-
}
|
|
89
|
-
|
|
90
|
-
export interface VectorSearchResult {
|
|
91
|
-
id: string;
|
|
92
|
-
score: number;
|
|
93
|
-
}
|
|
94
|
-
|
|
95
|
-
export async function searchVectors(
|
|
96
|
-
collection: string,
|
|
97
|
-
vector: number[],
|
|
98
|
-
limit: number,
|
|
99
|
-
): Promise<VectorSearchResult[]> {
|
|
100
|
-
const client = getQdrantClient();
|
|
101
|
-
const results = await client.search(prefixed(collection), {
|
|
102
|
-
vector,
|
|
103
|
-
limit,
|
|
104
|
-
with_payload: true,
|
|
105
|
-
});
|
|
106
|
-
return results.map((r) => ({
|
|
107
|
-
id: String((r.payload as Record<string, unknown>)?._id ?? r.id),
|
|
108
|
-
score: r.score,
|
|
109
|
-
}));
|
|
110
|
-
}
|
package/src/server/Storage.ts
DELETED
|
@@ -1,135 +0,0 @@
|
|
|
1
|
-
import {
|
|
2
|
-
CopyObjectCommand,
|
|
3
|
-
DeleteObjectCommand,
|
|
4
|
-
PutObjectCommand,
|
|
5
|
-
S3Client,
|
|
6
|
-
} from '@aws-sdk/client-s3';
|
|
7
|
-
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
|
|
8
|
-
|
|
9
|
-
// Key prefix applied (with '/') to every object key; '' means no prefix.
let _storagePrefix = '';
|
|
10
|
-
|
|
11
|
-
/** Sets the prefix prepended to every storage object key (see prefixKey). */
export function setStoragePrefix(prefix: string): void {
  _storagePrefix = prefix;
}
|
|
14
|
-
|
|
15
|
-
function prefixKey(key: string): string {
|
|
16
|
-
return _storagePrefix ? `${_storagePrefix}/${key}` : key;
|
|
17
|
-
}
|
|
18
|
-
|
|
19
|
-
function isDev(): boolean {
|
|
20
|
-
return process.env.NODE_ENV !== 'production';
|
|
21
|
-
}
|
|
22
|
-
|
|
23
|
-
function getS3Client(): S3Client {
|
|
24
|
-
// Always connect to localhost:9000 — MinIO in dev, sidecar R2 proxy in prod.
|
|
25
|
-
// The sidecar handles R2 auth and bucket isolation transparently.
|
|
26
|
-
return new S3Client({
|
|
27
|
-
region: 'us-east-1',
|
|
28
|
-
endpoint: process.env['MINIO_ENDPOINT'] ?? 'http://localhost:9000',
|
|
29
|
-
credentials: { accessKeyId: 'minioadmin', secretAccessKey: 'minioadmin' },
|
|
30
|
-
forcePathStyle: true,
|
|
31
|
-
requestChecksumCalculation: 'WHEN_REQUIRED',
|
|
32
|
-
responseChecksumValidation: 'WHEN_REQUIRED',
|
|
33
|
-
});
|
|
34
|
-
}
|
|
35
|
-
|
|
36
|
-
/** Maps a logical bucket to its physical bucket name (currently the identity). */
function getBucketName(bucket: 'public' | 'temp'): string {
  return bucket;
}
|
|
39
|
-
|
|
40
|
-
function getBaseUrl(bucket: 'public' | 'temp'): string {
|
|
41
|
-
if (isDev()) {
|
|
42
|
-
const endpoint = process.env['MINIO_ENDPOINT'] ?? 'http://localhost:9000';
|
|
43
|
-
return `${endpoint}/${bucket}/`;
|
|
44
|
-
}
|
|
45
|
-
// In production, the sidecar injects STORAGE_PUBLIC_URL / STORAGE_TEMP_URL
|
|
46
|
-
const url = bucket === 'public'
|
|
47
|
-
? process.env['STORAGE_PUBLIC_URL']
|
|
48
|
-
: process.env['STORAGE_TEMP_URL'];
|
|
49
|
-
if (!url) {
|
|
50
|
-
// Fall back to sidecar endpoint if URLs not injected
|
|
51
|
-
const endpoint = process.env['MINIO_ENDPOINT'] ?? 'http://localhost:9000';
|
|
52
|
-
return `${endpoint}/${bucket}/`;
|
|
53
|
-
}
|
|
54
|
-
return url.endsWith('/') ? url : url + '/';
|
|
55
|
-
}
|
|
56
|
-
|
|
57
|
-
/** Minimal object-storage abstraction over the app's two buckets. */
export interface StorageClient {
  /** Uploads a buffer and resolves to the object's URL. */
  put(
    bucket: 'public' | 'temp',
    key: string,
    body: Buffer,
    contentType: string,
  ): Promise<string>;
  /** Copies a temp object into the public bucket, deletes the source, returns the public URL. */
  moveToPublic(tempKey: string, destKey: string): Promise<string>;
  /** Computes the URL of an object (no existence check is performed). */
  url(bucket: 'public' | 'temp', key: string): string;
  /** Presigned direct-upload URL (temp bucket only) plus the final object URL. */
  presignedPut(
    bucket: 'temp',
    key: string,
  ): Promise<{ uploadUrl: string; resultUrl: string }>;
}
|
|
71
|
-
|
|
72
|
-
/**
|
|
73
|
-
* Creates a storage client backed by MinIO (dev) or the sidecar R2 proxy (production).
|
|
74
|
-
* Both listen on localhost:9000 — the sidecar handles R2 auth and bucket isolation.
|
|
75
|
-
*/
|
|
76
|
-
export function createStorageClient(): StorageClient {
|
|
77
|
-
const s3 = getS3Client();
|
|
78
|
-
|
|
79
|
-
return {
|
|
80
|
-
async put(bucket, key, body, contentType) {
|
|
81
|
-
const effectiveKey = prefixKey(key);
|
|
82
|
-
await s3.send(
|
|
83
|
-
new PutObjectCommand({
|
|
84
|
-
Bucket: getBucketName(bucket),
|
|
85
|
-
Key: effectiveKey,
|
|
86
|
-
Body: body,
|
|
87
|
-
ContentType: contentType,
|
|
88
|
-
}),
|
|
89
|
-
);
|
|
90
|
-
return getBaseUrl(bucket) + effectiveKey;
|
|
91
|
-
},
|
|
92
|
-
|
|
93
|
-
async moveToPublic(tempKey, destKey) {
|
|
94
|
-
const srcBucket = getBucketName('temp');
|
|
95
|
-
const destBucket = getBucketName('public');
|
|
96
|
-
const effectiveTempKey = prefixKey(tempKey);
|
|
97
|
-
const effectiveDestKey = prefixKey(destKey);
|
|
98
|
-
await s3.send(
|
|
99
|
-
new CopyObjectCommand({
|
|
100
|
-
CopySource: `${srcBucket}/${effectiveTempKey}`,
|
|
101
|
-
Bucket: destBucket,
|
|
102
|
-
Key: effectiveDestKey,
|
|
103
|
-
}),
|
|
104
|
-
);
|
|
105
|
-
await s3.send(
|
|
106
|
-
new DeleteObjectCommand({ Bucket: srcBucket, Key: effectiveTempKey }),
|
|
107
|
-
);
|
|
108
|
-
return getBaseUrl('public') + effectiveDestKey;
|
|
109
|
-
},
|
|
110
|
-
|
|
111
|
-
url(bucket, key) {
|
|
112
|
-
return getBaseUrl(bucket) + prefixKey(key);
|
|
113
|
-
},
|
|
114
|
-
|
|
115
|
-
async presignedPut(bucket, key) {
|
|
116
|
-
const effectiveKey = prefixKey(key);
|
|
117
|
-
const command = new PutObjectCommand({
|
|
118
|
-
Bucket: getBucketName(bucket),
|
|
119
|
-
Key: effectiveKey,
|
|
120
|
-
});
|
|
121
|
-
const uploadUrl = await getSignedUrl(s3, command, { expiresIn: 3600 });
|
|
122
|
-
|
|
123
|
-
// In dev, rewrite the presigned URL to go through the Express proxy
|
|
124
|
-
// so the browser doesn't hit a cross-origin S3 endpoint (CORS blocked).
|
|
125
|
-
let proxiedUploadUrl = uploadUrl;
|
|
126
|
-
if (isDev()) {
|
|
127
|
-
const parsed = new URL(uploadUrl);
|
|
128
|
-
proxiedUploadUrl = `/_s3${parsed.pathname}${parsed.search}`;
|
|
129
|
-
}
|
|
130
|
-
|
|
131
|
-
const resultUrl = getBaseUrl(bucket) + effectiveKey;
|
|
132
|
-
return { uploadUrl: proxiedUploadUrl, resultUrl };
|
|
133
|
-
},
|
|
134
|
-
};
|
|
135
|
-
}
|