@evalstudio/postgres 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +134 -0
- package/dist/index.d.ts +19 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +35 -0
- package/dist/index.js.map +1 -0
- package/dist/pool.d.ts +8 -0
- package/dist/pool.d.ts.map +1 -0
- package/dist/pool.js +10 -0
- package/dist/pool.js.map +1 -0
- package/dist/postgres-repository.d.ts +12 -0
- package/dist/postgres-repository.d.ts.map +1 -0
- package/dist/postgres-repository.js +89 -0
- package/dist/postgres-repository.js.map +1 -0
- package/dist/postgres-storage.d.ts +10 -0
- package/dist/postgres-storage.d.ts.map +1 -0
- package/dist/postgres-storage.js +106 -0
- package/dist/postgres-storage.js.map +1 -0
- package/dist/schema.d.ts +11 -0
- package/dist/schema.d.ts.map +1 -0
- package/dist/schema.js +93 -0
- package/dist/schema.js.map +1 -0
- package/package.json +53 -0
package/README.md
ADDED
@@ -0,0 +1,134 @@
# @evalstudio/postgres

PostgreSQL storage backend for [EvalStudio](https://github.com/Treatwell-AI/evalstudio). Replaces the default filesystem storage with PostgreSQL for team environments, production deployments, and horizontal scaling.

## Setting Up an EvalStudio Server

This guide walks through creating a standalone Node.js project that runs EvalStudio with PostgreSQL storage.

### 1. Create the project

```bash
mkdir my-evalstudio-server
cd my-evalstudio-server
npm init -y
```

### 2. Install dependencies

```bash
npm install @evalstudio/cli @evalstudio/postgres
```

### 3. Initialize the project

```bash
npx evalstudio init
```

This creates an `evalstudio.config.json` in the current directory.

### 4. Configure PostgreSQL storage

Edit `evalstudio.config.json` to use Postgres:

```json
{
  "version": 3,
  "storage": {
    "type": "postgres",
    "connectionString": "${EVALSTUDIO_DATABASE_URL}"
  },
  "llmSettings": {
    "provider": "openai",
    "apiKey": "your-api-key",
    "models": {
      "evaluation": "gpt-4o",
      "persona": "gpt-4o-mini"
    }
  }
}
```

The `connectionString` supports `${VAR}` placeholders that resolve from environment variables at runtime. If `connectionString` is omitted entirely, it falls back to the `EVALSTUDIO_DATABASE_URL` environment variable.
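
To make the placeholder behaviour concrete, here is a minimal TypeScript sketch of what such resolution could look like; the `resolveConnectionString` helper is purely illustrative and not an export of this package:

```ts
// Illustrative sketch only, not the package's actual resolver.
function resolveConnectionString(configured?: string): string {
  // Fall back to EVALSTUDIO_DATABASE_URL when nothing is configured.
  if (!configured) {
    const fallback = process.env.EVALSTUDIO_DATABASE_URL;
    if (!fallback) throw new Error("No PostgreSQL connection string configured");
    return fallback;
  }
  // Expand each ${VAR} placeholder from the environment at runtime.
  return configured.replace(/\$\{(\w+)\}/g, (_match, name: string) => {
    const value = process.env[name];
    if (value === undefined) throw new Error(`Missing environment variable: ${name}`);
    return value;
  });
}
```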

### 5. Add a start script

Update your `package.json`:

```json
{
  "scripts": {
    "start": "evalstudio serve",
    "db:init": "evalstudio db init"
  },
  "dependencies": {
    "@evalstudio/cli": "latest",
    "@evalstudio/postgres": "latest"
  }
}
```

### 6. Initialize the database

```bash
export EVALSTUDIO_DATABASE_URL="postgresql://user:pass@localhost:5432/evalstudio"
npm run db:init
```

### 7. Start the server

```bash
npm start
```

This serves both the API and Web UI on port 3000 (configurable with `--port` or the `EVALSTUDIO_PORT` env var).

## Docker

Example `Dockerfile` for deploying an EvalStudio server with PostgreSQL:

```dockerfile
FROM node:20-slim

WORKDIR /app

COPY package.json package-lock.json ./
RUN npm ci --omit=dev

COPY evalstudio.config.json ./

# Initialize the database schema, then start the server
CMD npm run db:init && npm start
```

Build and run:

```bash
docker build -t evalstudio-server .
docker run -p 3000:3000 \
  -e EVALSTUDIO_DATABASE_URL="postgresql://user:pass@host:5432/evalstudio" \
  evalstudio-server
```

`db:init` is idempotent — it's safe to run on every container start. The schema is created if it doesn't exist and left untouched if it does.

## How It Works

When `storage.type` is set to `"postgres"` in your config, `@evalstudio/core` dynamically imports `@evalstudio/postgres` at startup. No code changes are needed — just install the package and update the config.
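
A simplified TypeScript sketch of what that dynamic loading looks like (the exact logic inside `@evalstudio/core` may differ):

```ts
// Simplified sketch; the real wiring lives in @evalstudio/core.
async function loadStorage(storage: { type: string; connectionString?: string }) {
  if (storage.type !== "postgres") {
    throw new Error(`This sketch only covers "postgres"; got "${storage.type}"`);
  }
  // Loaded lazily, so pg is only pulled in when Postgres storage is configured.
  const { createPostgresStorage } = await import("@evalstudio/postgres");
  const connectionString =
    storage.connectionString ?? process.env.EVALSTUDIO_DATABASE_URL;
  if (!connectionString) throw new Error("No PostgreSQL connection string available");
  return createPostgresStorage(connectionString);
}
```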

If the package is not installed, you'll get a clear error message telling you to add it.

## API

### `createPostgresStorage(connectionString: string): Promise<StorageProvider>`

Creates a PostgreSQL-backed storage provider. The database schema must already exist (run `evalstudio db init` first). The connection is verified immediately so bad credentials fail at startup.
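
A minimal usage sketch, assuming `EVALSTUDIO_DATABASE_URL` is set and the schema has already been initialized:

```ts
import { createPostgresStorage } from "@evalstudio/postgres";

// Fails here at startup if the credentials are wrong,
// because the pool connection is verified immediately.
const storage = await createPostgresStorage(process.env.EVALSTUDIO_DATABASE_URL!);

// List the projects stored in the `projects` table.
const projects = await storage.listProjects();
console.log(projects); // e.g. [{ id: "...", name: "my-project" }]
```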

### `initSchema(connectionString: string): Promise<void>`

Creates all required database tables. Used internally by the `evalstudio db init` CLI command.
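
For example, to create the tables programmatically instead of through the CLI (the connection string below is a placeholder):

```ts
import { initSchema } from "@evalstudio/postgres";

// Programmatic equivalent of `evalstudio db init`. The schema uses
// CREATE TABLE IF NOT EXISTS, so running this repeatedly is safe.
await initSchema("postgresql://user:pass@localhost:5432/evalstudio");
```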

## License

MIT
package/dist/index.d.ts
ADDED
@@ -0,0 +1,19 @@
import type { StorageProvider } from "@evalstudio/core";
/**
 * Creates a PostgreSQL-backed StorageProvider.
 *
 * The database schema must already exist — run `evalstudio db init` first.
 * The connection pool is shared across all repositories.
 *
 * @param connectionString - PostgreSQL connection string
 * @returns StorageProvider backed by PostgreSQL
 */
export declare function createPostgresStorage(connectionString: string): Promise<StorageProvider>;
/**
 * Explicitly initializes the database schema.
 * Used by the `evalstudio db init` CLI command.
 *
 * @param connectionString - PostgreSQL connection string
 */
export declare function initSchema(connectionString: string): Promise<void>;
//# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAKxD;;;;;;;;GAQG;AACH,wBAAsB,qBAAqB,CAAC,gBAAgB,EAAE,MAAM,GAAG,OAAO,CAAC,eAAe,CAAC,CAQ9F;AAED;;;;;GAKG;AACH,wBAAsB,UAAU,CAAC,gBAAgB,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAOxE"}
package/dist/index.js
ADDED
@@ -0,0 +1,35 @@
import { createPool } from "./pool.js";
import { createPostgresStorageProvider } from "./postgres-storage.js";
import { initSchema as initSchemaImpl } from "./schema.js";
/**
 * Creates a PostgreSQL-backed StorageProvider.
 *
 * The database schema must already exist — run `evalstudio db init` first.
 * The connection pool is shared across all repositories.
 *
 * @param connectionString - PostgreSQL connection string
 * @returns StorageProvider backed by PostgreSQL
 */
export async function createPostgresStorage(connectionString) {
    const pool = createPool(connectionString);
    // Verify connection immediately so bad credentials fail at startup
    const client = await pool.connect();
    client.release();
    return createPostgresStorageProvider(pool);
}
/**
 * Explicitly initializes the database schema.
 * Used by the `evalstudio db init` CLI command.
 *
 * @param connectionString - PostgreSQL connection string
 */
export async function initSchema(connectionString) {
    const pool = createPool(connectionString);
    try {
        await initSchemaImpl(pool);
    }
    finally {
        await pool.end();
    }
}
//# sourceMappingURL=index.js.map
package/dist/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,UAAU,EAAE,MAAM,WAAW,CAAC;AACvC,OAAO,EAAE,6BAA6B,EAAE,MAAM,uBAAuB,CAAC;AACtE,OAAO,EAAE,UAAU,IAAI,cAAc,EAAE,MAAM,aAAa,CAAC;AAE3D;;;;;;;;GAQG;AACH,MAAM,CAAC,KAAK,UAAU,qBAAqB,CAAC,gBAAwB;IAClE,MAAM,IAAI,GAAG,UAAU,CAAC,gBAAgB,CAAC,CAAC;IAE1C,mEAAmE;IACnE,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,OAAO,EAAE,CAAC;IACpC,MAAM,CAAC,OAAO,EAAE,CAAC;IAEjB,OAAO,6BAA6B,CAAC,IAAI,CAAC,CAAC;AAC7C,CAAC;AAED;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,UAAU,CAAC,gBAAwB;IACvD,MAAM,IAAI,GAAG,UAAU,CAAC,gBAAgB,CAAC,CAAC;IAC1C,IAAI,CAAC;QACH,MAAM,cAAc,CAAC,IAAI,CAAC,CAAC;IAC7B,CAAC;YAAS,CAAC;QACT,MAAM,IAAI,CAAC,GAAG,EAAE,CAAC;IACnB,CAAC;AACH,CAAC"}
package/dist/pool.d.ts
ADDED
@@ -0,0 +1,8 @@
declare const Pool: typeof import("pg").Pool;
export type { Pool } from "pg";
/**
 * Creates a pg connection pool from a connection string.
 * The pool is shared across all repositories in a single StorageProvider instance.
 */
export declare function createPool(connectionString: string): InstanceType<typeof Pool>;
//# sourceMappingURL=pool.d.ts.map
package/dist/pool.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"pool.d.ts","sourceRoot":"","sources":["../src/pool.ts"],"names":[],"mappings":"AAEA,QAAA,MAAQ,IAAI,0BAAO,CAAC;AACpB,YAAY,EAAE,IAAI,EAAE,MAAM,IAAI,CAAC;AAE/B;;;GAGG;AACH,wBAAgB,UAAU,CAAC,gBAAgB,EAAE,MAAM,GAAG,YAAY,CAAC,OAAO,IAAI,CAAC,CAE9E"}
package/dist/pool.js
ADDED
@@ -0,0 +1,10 @@
import pg from "pg";
const { Pool } = pg;
/**
 * Creates a pg connection pool from a connection string.
 * The pool is shared across all repositories in a single StorageProvider instance.
 */
export function createPool(connectionString) {
    return new Pool({ connectionString });
}
//# sourceMappingURL=pool.js.map
package/dist/pool.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"pool.js","sourceRoot":"","sources":["../src/pool.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,IAAI,CAAC;AAEpB,MAAM,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC;AAGpB;;;GAGG;AACH,MAAM,UAAU,UAAU,CAAC,gBAAwB;IACjD,OAAO,IAAI,IAAI,CAAC,EAAE,gBAAgB,EAAE,CAAC,CAAC;AACxC,CAAC"}
package/dist/postgres-repository.d.ts
ADDED
@@ -0,0 +1,12 @@
import type { Pool } from "pg";
import type { Repository } from "@evalstudio/core";
/**
 * PostgreSQL-backed Repository<T> implementation.
 *
 * Uses the same findAll/saveAll interface as the JSON file repository.
 * Entity data is stored as JSONB in a `data` column. Reference columns
 * (project_id, eval_id, etc.) are duplicated from the data for relational
 * integrity and indexing.
 */
export declare function createPostgresRepository<T>(pool: Pool, entity: string, projectId: string): Repository<T>;
//# sourceMappingURL=postgres-repository.d.ts.map
package/dist/postgres-repository.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"postgres-repository.d.ts","sourceRoot":"","sources":["../src/postgres-repository.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,IAAI,CAAC;AAC/B,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAuBnD;;;;;;;GAOG;AACH,wBAAgB,wBAAwB,CAAC,CAAC,EACxC,IAAI,EAAE,IAAI,EACV,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,MAAM,GAChB,UAAU,CAAC,CAAC,CAAC,CA4Cf"}
package/dist/postgres-repository.js
ADDED
@@ -0,0 +1,89 @@
/**
 * Maps entity names to their table names.
 * Entity names come from the module factory (e.g., "personas", "scenarios").
 */
const TABLE_MAP = {
    personas: "personas",
    scenarios: "scenarios",
    connectors: "connectors",
    evals: "evals",
    runs: "runs",
    executions: "executions",
};
function getTableName(entity) {
    const table = TABLE_MAP[entity];
    if (!table) {
        throw new Error(`Unknown entity type: ${entity}`);
    }
    return table;
}
/**
 * PostgreSQL-backed Repository<T> implementation.
 *
 * Uses the same findAll/saveAll interface as the JSON file repository.
 * Entity data is stored as JSONB in a `data` column. Reference columns
 * (project_id, eval_id, etc.) are duplicated from the data for relational
 * integrity and indexing.
 */
export function createPostgresRepository(pool, entity, projectId) {
    const table = getTableName(entity);
    return {
        async findAll() {
            const { rows } = await pool.query(`SELECT data FROM ${table} WHERE project_id = $1`, [projectId]);
            return rows.map((r) => r.data);
        },
        async saveAll(items) {
            const client = await pool.connect();
            try {
                await client.query("BEGIN");
                await client.query(`DELETE FROM ${table} WHERE project_id = $1`, [projectId]);
                for (const item of items) {
                    const data = item;
                    const id = data.id;
                    // Build reference columns based on entity type
                    const refColumns = extractReferenceColumns(table, data);
                    const columns = ["id", "project_id", "data", ...Object.keys(refColumns)];
                    const values = [id, projectId, JSON.stringify(item), ...Object.values(refColumns)];
                    const placeholders = values.map((_, i) => `$${i + 1}`).join(", ");
                    await client.query(`INSERT INTO ${table} (${columns.join(", ")}) VALUES (${placeholders})`, values);
                }
                await client.query("COMMIT");
            }
            catch (e) {
                await client.query("ROLLBACK");
                throw e;
            }
            finally {
                client.release();
            }
        },
    };
}
/**
 * Extracts reference columns from entity data based on the table type.
 * These are duplicated from the JSONB data for relational integrity.
 */
function extractReferenceColumns(table, data) {
    switch (table) {
        case "evals":
            return {
                connector_id: data.connectorId ?? null,
            };
        case "executions":
            return {
                eval_id: data.evalId ?? null,
            };
        case "runs":
            return {
                eval_id: data.evalId ?? null,
                scenario_id: data.scenarioId ?? null,
                persona_id: data.personaId ?? null,
                connector_id: data.connectorId ?? null,
                execution_id: data.executionId ?? null,
                status: data.status ?? "queued",
            };
        default:
            return {};
    }
}
//# sourceMappingURL=postgres-repository.js.map
package/dist/postgres-repository.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"postgres-repository.js","sourceRoot":"","sources":["../src/postgres-repository.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,SAAS,GAA2B;IACxC,QAAQ,EAAE,UAAU;IACpB,SAAS,EAAE,WAAW;IACtB,UAAU,EAAE,YAAY;IACxB,KAAK,EAAE,OAAO;IACd,IAAI,EAAE,MAAM;IACZ,UAAU,EAAE,YAAY;CACzB,CAAC;AAEF,SAAS,YAAY,CAAC,MAAc;IAClC,MAAM,KAAK,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;IAChC,IAAI,CAAC,KAAK,EAAE,CAAC;QACX,MAAM,IAAI,KAAK,CAAC,wBAAwB,MAAM,EAAE,CAAC,CAAC;IACpD,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,wBAAwB,CACtC,IAAU,EACV,MAAc,EACd,SAAiB;IAEjB,MAAM,KAAK,GAAG,YAAY,CAAC,MAAM,CAAC,CAAC;IAEnC,OAAO;QACL,KAAK,CAAC,OAAO;YACX,MAAM,EAAE,IAAI,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,CAC/B,oBAAoB,KAAK,wBAAwB,EACjD,CAAC,SAAS,CAAC,CACZ,CAAC;YACF,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,CAAc,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;QAC9C,CAAC;QAED,KAAK,CAAC,OAAO,CAAC,KAAU;YACtB,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,OAAO,EAAE,CAAC;YACpC,IAAI,CAAC;gBACH,MAAM,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;gBAC5B,MAAM,MAAM,CAAC,KAAK,CAAC,eAAe,KAAK,wBAAwB,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC;gBAE9E,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;oBACzB,MAAM,IAAI,GAAG,IAA+B,CAAC;oBAC7C,MAAM,EAAE,GAAG,IAAI,CAAC,EAAY,CAAC;oBAE7B,+CAA+C;oBAC/C,MAAM,UAAU,GAAG,uBAAuB,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC;oBAExD,MAAM,OAAO,GAAG,CAAC,IAAI,EAAE,YAAY,EAAE,MAAM,EAAE,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC;oBACzE,MAAM,MAAM,GAAG,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,GAAG,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;oBACnF,MAAM,YAAY,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;oBAElE,MAAM,MAAM,CAAC,KAAK,CAChB,eAAe,KAAK,KAAK,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,YAAY,GAAG,EACvE,MAAM,CACP,CAAC;gBACJ,CAAC;gBAED,MAAM,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;YAC/B,CAAC;YAAC,OAAO,CAAC,EAAE,CAAC;gBACX,MAAM,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;gBAC/B,MAAM,CAAC,CAAC;YACV,CAAC;oBAAS,CAAC;gBACT,MAAM,CAAC,OAAO,EAAE,CAAC;YACnB,CAAC;QACH,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,SAAS,uBAAuB,CAC9B,KAAa,EACb,IAA6B;IAE7B,QAAQ,KAAK,EAAE,CAAC;QACd,KAAK,OAAO;YACV,OAAO;gBACL,YAAY,EAAE,IAAI,CAAC,WAAW,IAAI,IAAI;aACvC,CAAC;QACJ,KAAK,YAAY;YACf,OAAO;gBACL,OAAO,EAAE,IAAI,CAAC,MAAM,IAAI,IAAI;aAC7B,CAAC;QACJ,KAAK,MAAM;YACT,OAAO;gBACL,OAAO,EAAE,IAAI,CAAC,MAAM,IAAI,IAAI;gBAC5B,WAAW,EAAE,IAAI,CAAC,UAAU,IAAI,IAAI;gBACpC,UAAU,EAAE,IAAI,CAAC,SAAS,IAAI,IAAI;gBAClC,YAAY,EAAE,IAAI,CAAC,WAAW,IAAI,IAAI;gBACtC,YAAY,EAAE,IAAI,CAAC,WAAW,IAAI,IAAI;gBACtC,MAAM,EAAE,IAAI,CAAC,MAAM,IAAI,QAAQ;aAChC,CAAC;QACJ;YACE,OAAO,EAAE,CAAC;IACd,CAAC;AACH,CAAC"}
package/dist/postgres-storage.d.ts
ADDED
@@ -0,0 +1,10 @@
import type { Pool } from "pg";
import type { StorageProvider } from "@evalstudio/core";
/**
 * PostgreSQL-backed StorageProvider implementation.
 *
 * Projects are stored in a `projects` table (not in the config file).
 * Entity data goes into per-entity tables with a `project_id` scope.
 */
export declare function createPostgresStorageProvider(pool: Pool): StorageProvider;
//# sourceMappingURL=postgres-storage.d.ts.map
package/dist/postgres-storage.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"postgres-storage.d.ts","sourceRoot":"","sources":["../src/postgres-storage.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,IAAI,CAAC;AAC/B,OAAO,KAAK,EACV,eAAe,EAMhB,MAAM,kBAAkB,CAAC;AAG1B;;;;;GAKG;AACH,wBAAgB,6BAA6B,CAAC,IAAI,EAAE,IAAI,GAAG,eAAe,CA2HzE"}
package/dist/postgres-storage.js
ADDED
@@ -0,0 +1,106 @@
import { randomUUID } from "node:crypto";
import { createPostgresRepository } from "./postgres-repository.js";
/**
 * PostgreSQL-backed StorageProvider implementation.
 *
 * Projects are stored in a `projects` table (not in the config file).
 * Entity data goes into per-entity tables with a `project_id` scope.
 */
export function createPostgresStorageProvider(pool) {
    return {
        createRepository(entity, projectId) {
            return createPostgresRepository(pool, entity, projectId);
        },
        async listProjects() {
            const { rows } = await pool.query("SELECT id, name FROM projects ORDER BY created_at");
            return rows.map((r) => ({
                id: r.id,
                name: r.name,
            }));
        },
        async createProject(name) {
            const id = randomUUID();
            await pool.query("INSERT INTO projects (id, name) VALUES ($1, $2)", [id, name]);
            return { id, name, workspaceDir: "" };
        },
        async deleteProject(projectId) {
            const result = await pool.query("DELETE FROM projects WHERE id = $1", [projectId]);
            if (result.rowCount === 0) {
                throw new Error(`Project "${projectId}" not found`);
            }
            // CASCADE handles entity cleanup
        },
        async getProjectEntry(projectId) {
            const { rows } = await pool.query("SELECT id, name, llm_settings, max_concurrency FROM projects WHERE id = $1", [projectId]);
            if (rows.length === 0) {
                throw new Error(`Project "${projectId}" not found`);
            }
            const row = rows[0];
            return {
                id: row.id,
                name: row.name,
                llmSettings: row.llm_settings ?? undefined,
                maxConcurrency: row.max_concurrency ?? undefined,
            };
        },
        async updateProjectEntry(projectId, input) {
            // Fetch current entry for merging
            const current = await this.getProjectEntry(projectId);
            // Handle llmSettings
            let newLLMSettings;
            if (input.llmSettings === null) {
                newLLMSettings = undefined;
            }
            else if (input.llmSettings !== undefined) {
                // Validate provider
                if (!input.llmSettings.provider) {
                    throw new Error("LLM provider type is required");
                }
                // apiKey fallback to existing
                if (!input.llmSettings.apiKey) {
                    const existingKey = current.llmSettings?.apiKey;
                    if (!existingKey) {
                        throw new Error("LLM provider API key is required");
                    }
                    newLLMSettings = { ...input.llmSettings, apiKey: existingKey };
                }
                else {
                    newLLMSettings = input.llmSettings;
                }
            }
            else {
                newLLMSettings = current.llmSettings;
            }
            // Handle maxConcurrency
            let newMaxConcurrency;
            if (input.maxConcurrency === null) {
                newMaxConcurrency = undefined;
            }
            else if (input.maxConcurrency !== undefined) {
                if (input.maxConcurrency < 1) {
                    throw new Error("maxConcurrency must be at least 1");
                }
                newMaxConcurrency = input.maxConcurrency;
            }
            else {
                newMaxConcurrency = current.maxConcurrency;
            }
            const newName = input.name ?? current.name;
            await pool.query(`UPDATE projects
         SET name = $1, llm_settings = $2, max_concurrency = $3, updated_at = now()
         WHERE id = $4`, [
                newName,
                newLLMSettings ? JSON.stringify(newLLMSettings) : null,
                newMaxConcurrency ?? null,
                projectId,
            ]);
            return {
                id: projectId,
                name: newName,
                llmSettings: newLLMSettings,
                maxConcurrency: newMaxConcurrency,
            };
        },
    };
}
//# sourceMappingURL=postgres-storage.js.map
package/dist/postgres-storage.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"postgres-storage.js","sourceRoot":"","sources":["../src/postgres-storage.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAUzC,OAAO,EAAE,wBAAwB,EAAE,MAAM,0BAA0B,CAAC;AAEpE;;;;;GAKG;AACH,MAAM,UAAU,6BAA6B,CAAC,IAAU;IACtD,OAAO;QACL,gBAAgB,CAAI,MAAc,EAAE,SAAiB;YACnD,OAAO,wBAAwB,CAAI,IAAI,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;QAC9D,CAAC;QAED,KAAK,CAAC,YAAY;YAChB,MAAM,EAAE,IAAI,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,CAC/B,mDAAmD,CACpD,CAAC;YACF,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,CAA+B,EAAE,EAAE,CAAC,CAAC;gBACpD,EAAE,EAAE,CAAC,CAAC,EAAE;gBACR,IAAI,EAAE,CAAC,CAAC,IAAI;aACb,CAAC,CAAC,CAAC;QACN,CAAC;QAED,KAAK,CAAC,aAAa,CAAC,IAAY;YAC9B,MAAM,EAAE,GAAG,UAAU,EAAE,CAAC;YACxB,MAAM,IAAI,CAAC,KAAK,CACd,iDAAiD,EACjD,CAAC,EAAE,EAAE,IAAI,CAAC,CACX,CAAC;YACF,OAAO,EAAE,EAAE,EAAE,IAAI,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC;QACxC,CAAC;QAED,KAAK,CAAC,aAAa,CAAC,SAAiB;YACnC,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,KAAK,CAC7B,oCAAoC,EACpC,CAAC,SAAS,CAAC,CACZ,CAAC;YACF,IAAI,MAAM,CAAC,QAAQ,KAAK,CAAC,EAAE,CAAC;gBAC1B,MAAM,IAAI,KAAK,CAAC,YAAY,SAAS,aAAa,CAAC,CAAC;YACtD,CAAC;YACD,iCAAiC;QACnC,CAAC;QAED,KAAK,CAAC,eAAe,CAAC,SAAiB;YACrC,MAAM,EAAE,IAAI,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,CAC/B,4EAA4E,EAC5E,CAAC,SAAS,CAAC,CACZ,CAAC;YACF,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;gBACtB,MAAM,IAAI,KAAK,CAAC,YAAY,SAAS,aAAa,CAAC,CAAC;YACtD,CAAC;YACD,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,CAKjB,CAAC;YACF,OAAO;gBACL,EAAE,EAAE,GAAG,CAAC,EAAE;gBACV,IAAI,EAAE,GAAG,CAAC,IAAI;gBACd,WAAW,EAAE,GAAG,CAAC,YAAY,IAAI,SAAS;gBAC1C,cAAc,EAAE,GAAG,CAAC,eAAe,IAAI,SAAS;aACjD,CAAC;QACJ,CAAC;QAED,KAAK,CAAC,kBAAkB,CACtB,SAAiB,EACjB,KAA+B;YAE/B,kCAAkC;YAClC,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,eAAe,CAAC,SAAS,CAAC,CAAC;YAEtD,qBAAqB;YACrB,IAAI,cAAuC,CAAC;YAC5C,IAAI,KAAK,CAAC,WAAW,KAAK,IAAI,EAAE,CAAC;gBAC/B,cAAc,GAAG,SAAS,CAAC;YAC7B,CAAC;iBAAM,IAAI,KAAK,CAAC,WAAW,KAAK,SAAS,EAAE,CAAC;gBAC3C,oBAAoB;gBACpB,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,EAAE,CAAC;oBAChC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;gBACnD,CAAC;gBACD,8BAA8B;gBAC9B,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,EAAE,CAAC;oBAC9B,MAAM,WAAW,GAAG,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC;oBAChD,IAAI,CAAC,WAAW,EAAE,CAAC;wBACjB,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;oBACtD,CAAC;oBACD,cAAc,GAAG,EAAE,GAAG,KAAK,CAAC,WAAW,EAAE,MAAM,EAAE,WAAW,EAAE,CAAC;gBACjE,CAAC;qBAAM,CAAC;oBACN,cAAc,GAAG,KAAK,CAAC,WAAW,CAAC;gBACrC,CAAC;YACH,CAAC;iBAAM,CAAC;gBACN,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;YACvC,CAAC;YAED,wBAAwB;YACxB,IAAI,iBAAqC,CAAC;YAC1C,IAAI,KAAK,CAAC,cAAc,KAAK,IAAI,EAAE,CAAC;gBAClC,iBAAiB,GAAG,SAAS,CAAC;YAChC,CAAC;iBAAM,IAAI,KAAK,CAAC,cAAc,KAAK,SAAS,EAAE,CAAC;gBAC9C,IAAI,KAAK,CAAC,cAAc,GAAG,CAAC,EAAE,CAAC;oBAC7B,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAC;gBACvD,CAAC;gBACD,iBAAiB,GAAG,KAAK,CAAC,cAAc,CAAC;YAC3C,CAAC;iBAAM,CAAC;gBACN,iBAAiB,GAAG,OAAO,CAAC,cAAc,CAAC;YAC7C,CAAC;YAED,MAAM,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;YAE3C,MAAM,IAAI,CAAC,KAAK,CACd;;uBAEe,EACf;gBACE,OAAO;gBACP,cAAc,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,IAAI;gBACtD,iBAAiB,IAAI,IAAI;gBACzB,SAAS;aACV,CACF,CAAC;YAEF,OAAO;gBACL,EAAE,EAAE,SAAS;gBACb,IAAI,EAAE,OAAO;gBACb,WAAW,EAAE,cAAc;gBAC3B,cAAc,EAAE,iBAAiB;aAClC,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC"}
package/dist/schema.d.ts
ADDED
@@ -0,0 +1,11 @@
import type { Pool } from "pg";
/**
 * Initializes the database schema.
 * Uses IF NOT EXISTS so it's safe to call on every startup.
 */
export declare function initSchema(pool: Pool): Promise<void>;
/**
 * Checks if the schema has been initialized (projects table exists).
 */
export declare function schemaExists(pool: Pool): Promise<boolean>;
//# sourceMappingURL=schema.d.ts.map
package/dist/schema.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../src/schema.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,IAAI,CAAC;AA8E/B;;;GAGG;AACH,wBAAsB,UAAU,CAAC,IAAI,EAAE,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAE1D;AAED;;GAEG;AACH,wBAAsB,YAAY,CAAC,IAAI,EAAE,IAAI,GAAG,OAAO,CAAC,OAAO,CAAC,CAQ/D"}
package/dist/schema.js
ADDED
@@ -0,0 +1,93 @@
/**
 * SQL schema for EvalStudio PostgreSQL storage.
 *
 * Design: reference columns (project_id, eval_id, etc.) are real columns
 * with REFERENCES constraints for relational integrity. The rest of the
 * entity payload lives in a JSONB `data` column to avoid mapping every
 * field upfront.
 */
const SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS projects (
  id UUID PRIMARY KEY,
  name TEXT NOT NULL,
  llm_settings JSONB,
  max_concurrency INTEGER,
  created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
  updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE TABLE IF NOT EXISTS personas (
  id UUID PRIMARY KEY,
  project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
  data JSONB NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_personas_project ON personas(project_id);

CREATE TABLE IF NOT EXISTS scenarios (
  id UUID PRIMARY KEY,
  project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
  data JSONB NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_scenarios_project ON scenarios(project_id);

CREATE TABLE IF NOT EXISTS connectors (
  id UUID PRIMARY KEY,
  project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
  data JSONB NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_connectors_project ON connectors(project_id);

CREATE TABLE IF NOT EXISTS evals (
  id UUID PRIMARY KEY,
  project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
  connector_id UUID NOT NULL REFERENCES connectors(id),
  data JSONB NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_evals_project ON evals(project_id);
CREATE INDEX IF NOT EXISTS idx_evals_connector ON evals(connector_id);

CREATE TABLE IF NOT EXISTS executions (
  id INTEGER NOT NULL,
  project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
  eval_id UUID NOT NULL REFERENCES evals(id),
  data JSONB NOT NULL,
  PRIMARY KEY (project_id, id)
);
CREATE INDEX IF NOT EXISTS idx_executions_project ON executions(project_id);
CREATE INDEX IF NOT EXISTS idx_executions_eval ON executions(eval_id);

CREATE TABLE IF NOT EXISTS runs (
  id UUID PRIMARY KEY,
  project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
  eval_id UUID REFERENCES evals(id),
  scenario_id UUID NOT NULL REFERENCES scenarios(id),
  persona_id UUID REFERENCES personas(id),
  connector_id UUID REFERENCES connectors(id),
  execution_id INTEGER,
  status TEXT NOT NULL,
  data JSONB NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_runs_project ON runs(project_id);
CREATE INDEX IF NOT EXISTS idx_runs_status ON runs(project_id, status);
CREATE INDEX IF NOT EXISTS idx_runs_eval ON runs(project_id, eval_id);
CREATE INDEX IF NOT EXISTS idx_runs_scenario ON runs(project_id, scenario_id);
CREATE INDEX IF NOT EXISTS idx_runs_execution ON runs(project_id, execution_id);
`;
/**
 * Initializes the database schema.
 * Uses IF NOT EXISTS so it's safe to call on every startup.
 */
export async function initSchema(pool) {
    await pool.query(SCHEMA_SQL);
}
/**
 * Checks if the schema has been initialized (projects table exists).
 */
export async function schemaExists(pool) {
    const result = await pool.query(`SELECT EXISTS (
    SELECT FROM information_schema.tables
    WHERE table_name = 'projects'
  ) AS exists`);
    return result.rows[0].exists;
}
//# sourceMappingURL=schema.js.map
package/dist/schema.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"schema.js","sourceRoot":"","sources":["../src/schema.ts"],"names":[],"mappings":"AAEA;;;;;;;GAOG;AACH,MAAM,UAAU,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAkElB,CAAC;AAEF;;;GAGG;AACH,MAAM,CAAC,KAAK,UAAU,UAAU,CAAC,IAAU;IACzC,MAAM,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;AAC/B,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,YAAY,CAAC,IAAU;IAC3C,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,KAAK,CAC7B;;;gBAGY,CACb,CAAC;IACF,OAAO,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;AAC/B,CAAC"}
package/package.json
ADDED
@@ -0,0 +1,53 @@
{
  "name": "@evalstudio/postgres",
  "publishConfig": {
    "access": "public"
  },
  "version": "0.4.0",
  "description": "PostgreSQL storage backend for EvalStudio",
  "type": "module",
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "exports": {
    ".": {
      "types": "./dist/index.d.ts",
      "import": "./dist/index.js"
    }
  },
  "keywords": [
    "evalstudio",
    "postgres",
    "storage"
  ],
  "repository": {
    "type": "git",
    "url": "https://github.com/Treatwell-AI/evalstudio.git",
    "directory": "packages/postgres"
  },
  "files": [
    "dist"
  ],
  "dependencies": {
    "pg": "^8.13.1",
    "@evalstudio/core": "0.4.0"
  },
  "devDependencies": {
    "@types/pg": "^8.11.11",
    "@types/node": "^22.10.10",
    "typescript": "^5.7.3",
    "vitest": "^3.0.4"
  },
  "engines": {
    "node": ">=20"
  },
  "license": "MIT",
  "scripts": {
    "build": "tsc",
    "dev": "tsc --watch",
    "test": "vitest run --passWithNoTests",
    "test:watch": "vitest",
    "typecheck": "tsc --noEmit",
    "lint": "eslint src",
    "clean": "rm -rf dist"
  }
}