@mastra/pg 0.3.1-alpha.3 → 0.3.1-alpha.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,23 +1,23 @@
 
- > @mastra/pg@0.3.1-alpha.3 build /home/runner/work/mastra/mastra/stores/pg
+ > @mastra/pg@0.3.1-alpha.4 build /home/runner/work/mastra/mastra/stores/pg
  > tsup src/index.ts --format esm,cjs --experimental-dts --clean --treeshake=smallest --splitting
 
  CLI Building entry: src/index.ts
  CLI Using tsconfig: tsconfig.json
  CLI tsup v8.4.0
  TSC Build start
- TSC ⚡️ Build success in 11190ms
+ TSC ⚡️ Build success in 11471ms
  DTS Build start
  CLI Target: es2022
- Analysis will use the bundled TypeScript version 5.8.2
+ Analysis will use the bundled TypeScript version 5.8.3
  Writing package typings: /home/runner/work/mastra/mastra/stores/pg/dist/_tsup-dts-rollup.d.ts
- Analysis will use the bundled TypeScript version 5.8.2
+ Analysis will use the bundled TypeScript version 5.8.3
  Writing package typings: /home/runner/work/mastra/mastra/stores/pg/dist/_tsup-dts-rollup.d.cts
- DTS ⚡️ Build success in 11666ms
+ DTS ⚡️ Build success in 12539ms
  CLI Cleaning output folder
  ESM Build start
  CJS Build start
- ESM dist/index.js 48.40 KB
- ESM ⚡️ Build success in 1450ms
- CJS dist/index.cjs 48.82 KB
- CJS ⚡️ Build success in 1450ms
+ ESM dist/index.js 50.33 KB
+ ESM ⚡️ Build success in 1488ms
+ CJS dist/index.cjs 50.78 KB
+ CJS ⚡️ Build success in 1489ms
package/CHANGELOG.md CHANGED
@@ -1,5 +1,14 @@
  # @mastra/pg
 
+ ## 0.3.1-alpha.4
+
+ ### Patch Changes
+
+ - 479f490: [MASTRA-3131] Add getWorkflowRunByID and add resourceId as filter for getWorkflowRuns
+ - Updated dependencies [e4943b8]
+ - Updated dependencies [479f490]
+ - @mastra/core@0.9.1-alpha.4
+
  ## 0.3.1-alpha.3
 
  ### Patch Changes
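
The changelog entry above introduces `getWorkflowRunById` and a `resourceId` filter on `getWorkflowRuns`. Below is a minimal usage sketch based on the type declarations later in this diff; the connection settings, workflow name, run ID, and resource ID are placeholders, and the constructor options are assumed to mirror the `PostgresConfig` shape used in the package's tests.

```ts
import { PostgresStore } from '@mastra/pg';

async function main() {
  // Placeholder connection settings (assumed PostgresConfig shape, as in the package's tests).
  const store = new PostgresStore({
    host: 'localhost',
    port: 5432,
    user: 'postgres',
    password: 'postgres',
    database: 'mastra',
  });

  // New in 0.3.1-alpha.4: resourceId can be passed alongside the existing
  // workflowName, date range, and pagination filters.
  const { runs, total } = await store.getWorkflowRuns({
    workflowName: 'my-workflow', // placeholder
    resourceId: 'resource-123',  // placeholder
    limit: 10,
    offset: 0,
  });
  console.log(`matched ${total} runs`, runs.map(r => r.runId));

  // New in 0.3.1-alpha.4: fetch a single run by its ID; resolves to null when no row matches.
  const run = await store.getWorkflowRunById({ runId: 'run-abc', workflowName: 'my-workflow' });
  console.log(run?.snapshot);

  await store.close();
}

main().catch(console.error);
```

Per the dist output further down, if the snapshot table has no `resourceId` column the store logs a warning and skips that filter rather than failing (see the new `hasColumn` check).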
@@ -25,6 +25,8 @@ import type { StorageThreadType } from '@mastra/core/memory';
  import type { TABLE_NAMES } from '@mastra/core/storage';
  import type { UpsertVectorParams } from '@mastra/core/vector';
  import type { VectorFilter } from '@mastra/core/vector/filter';
+ import type { WorkflowRun } from '@mastra/core/storage';
+ import type { WorkflowRuns } from '@mastra/core/storage';
  import type { WorkflowRunState } from '@mastra/core/workflows';
 
  export declare const baseTestConfigs: {
@@ -321,7 +323,7 @@ declare class PostgresStore extends MastraStorage {
  deleteThread({ threadId }: {
  threadId: string;
  }): Promise<void>;
- getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T>;
+ getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T[]>;
  saveMessages({ messages }: {
  messages: MessageType[];
  }): Promise<MessageType[]>;
@@ -334,22 +336,20 @@ declare class PostgresStore extends MastraStorage {
  workflowName: string;
  runId: string;
  }): Promise<WorkflowRunState | null>;
- getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, }?: {
+ private hasColumn;
+ private parseWorkflowRun;
+ getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId, }?: {
  workflowName?: string;
  fromDate?: Date;
  toDate?: Date;
  limit?: number;
  offset?: number;
- }): Promise<{
- runs: Array<{
- workflowName: string;
- runId: string;
- snapshot: WorkflowRunState | string;
- createdAt: Date;
- updatedAt: Date;
- }>;
- total: number;
- }>;
+ resourceId?: string;
+ }): Promise<WorkflowRuns>;
+ getWorkflowRunById({ runId, workflowName, }: {
+ runId: string;
+ workflowName?: string;
+ }): Promise<WorkflowRun | null>;
  close(): Promise<void>;
  }
  export { PostgresStore }
@@ -25,6 +25,8 @@ import type { StorageThreadType } from '@mastra/core/memory';
  import type { TABLE_NAMES } from '@mastra/core/storage';
  import type { UpsertVectorParams } from '@mastra/core/vector';
  import type { VectorFilter } from '@mastra/core/vector/filter';
+ import type { WorkflowRun } from '@mastra/core/storage';
+ import type { WorkflowRuns } from '@mastra/core/storage';
  import type { WorkflowRunState } from '@mastra/core/workflows';
 
  export declare const baseTestConfigs: {
@@ -321,7 +323,7 @@ declare class PostgresStore extends MastraStorage {
  deleteThread({ threadId }: {
  threadId: string;
  }): Promise<void>;
- getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T>;
+ getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T[]>;
  saveMessages({ messages }: {
  messages: MessageType[];
  }): Promise<MessageType[]>;
@@ -334,22 +336,20 @@ declare class PostgresStore extends MastraStorage {
  workflowName: string;
  runId: string;
  }): Promise<WorkflowRunState | null>;
- getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, }?: {
+ private hasColumn;
+ private parseWorkflowRun;
+ getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId, }?: {
  workflowName?: string;
  fromDate?: Date;
  toDate?: Date;
  limit?: number;
  offset?: number;
- }): Promise<{
- runs: Array<{
- workflowName: string;
- runId: string;
- snapshot: WorkflowRunState | string;
- createdAt: Date;
- updatedAt: Date;
- }>;
- total: number;
- }>;
+ resourceId?: string;
+ }): Promise<WorkflowRuns>;
+ getWorkflowRunById({ runId, workflowName, }: {
+ runId: string;
+ workflowName?: string;
+ }): Promise<WorkflowRun | null>;
  close(): Promise<void>;
  }
  export { PostgresStore }
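
The declaration diff above also tightens `getMessages` to return `Promise<T[]>` instead of `Promise<T>`. A small sketch of what that means for callers; it assumes `store` is an already-initialized `PostgresStore` (as in the sketch after the changelog), that the snippet runs inside an async function, and that the thread ID is a placeholder.

```ts
import type { MessageType } from '@mastra/core/memory';

// With the 0.3.1-alpha.4 typings the result is an array of the requested type,
// so it can be iterated directly without casting the whole return value.
const messages = await store.getMessages<MessageType>({ threadId: 'thread-123' });
for (const msg of messages) {
  console.log(msg.role, msg.createdAt);
}
```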
package/dist/index.cjs CHANGED
@@ -1416,66 +1416,128 @@ var PostgresStore = class extends storage.MastraStorage {
  throw error;
  }
  }
+ async hasColumn(table, column) {
+ const schema = this.schema || "public";
+ const result = await this.db.oneOrNone(
+ `SELECT 1 FROM information_schema.columns WHERE table_schema = $1 AND table_name = $2 AND (column_name = $3 OR column_name = $4)`,
+ [schema, table, column, column.toLowerCase()]
+ );
+ return !!result;
+ }
+ parseWorkflowRun(row) {
+ let parsedSnapshot = row.snapshot;
+ if (typeof parsedSnapshot === "string") {
+ try {
+ parsedSnapshot = JSON.parse(row.snapshot);
+ } catch (e) {
+ console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+ }
+ }
+ return {
+ workflowName: row.workflow_name,
+ runId: row.run_id,
+ snapshot: parsedSnapshot,
+ createdAt: row.createdAt,
+ updatedAt: row.updatedAt,
+ resourceId: row.resourceId
+ };
+ }
  async getWorkflowRuns({
  workflowName,
  fromDate,
  toDate,
  limit,
- offset
+ offset,
+ resourceId
  } = {}) {
- const conditions = [];
- const values = [];
- let paramIndex = 1;
- if (workflowName) {
- conditions.push(`workflow_name = $${paramIndex}`);
- values.push(workflowName);
- paramIndex++;
- }
- if (fromDate) {
- conditions.push(`"createdAt" >= $${paramIndex}`);
- values.push(fromDate);
- paramIndex++;
- }
- if (toDate) {
- conditions.push(`"createdAt" <= $${paramIndex}`);
- values.push(toDate);
- paramIndex++;
- }
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
- let total = 0;
- if (limit !== void 0 && offset !== void 0) {
- const countResult = await this.db.one(
- `SELECT COUNT(*) as count FROM ${this.getTableName(storage.TABLE_WORKFLOW_SNAPSHOT)} ${whereClause}`,
- values
- );
- total = Number(countResult.count);
- }
- const query = `
+ try {
+ const conditions = [];
+ const values = [];
+ let paramIndex = 1;
+ if (workflowName) {
+ conditions.push(`workflow_name = $${paramIndex}`);
+ values.push(workflowName);
+ paramIndex++;
+ }
+ if (resourceId) {
+ const hasResourceId = await this.hasColumn(storage.TABLE_WORKFLOW_SNAPSHOT, "resourceId");
+ if (hasResourceId) {
+ conditions.push(`"resourceId" = $${paramIndex}`);
+ values.push(resourceId);
+ paramIndex++;
+ } else {
+ console.warn(`[${storage.TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
+ }
+ }
+ if (fromDate) {
+ conditions.push(`"createdAt" >= $${paramIndex}`);
+ values.push(fromDate);
+ paramIndex++;
+ }
+ if (toDate) {
+ conditions.push(`"createdAt" <= $${paramIndex}`);
+ values.push(toDate);
+ paramIndex++;
+ }
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+ let total = 0;
+ if (limit !== void 0 && offset !== void 0) {
+ const countResult = await this.db.one(
+ `SELECT COUNT(*) as count FROM ${this.getTableName(storage.TABLE_WORKFLOW_SNAPSHOT)} ${whereClause}`,
+ values
+ );
+ total = Number(countResult.count);
+ }
+ const query = `
  SELECT * FROM ${this.getTableName(storage.TABLE_WORKFLOW_SNAPSHOT)}
  ${whereClause}
  ORDER BY "createdAt" DESC
  ${limit !== void 0 && offset !== void 0 ? ` LIMIT $${paramIndex} OFFSET $${paramIndex + 1}` : ""}
  `;
- const queryValues = limit !== void 0 && offset !== void 0 ? [...values, limit, offset] : values;
- const result = await this.db.manyOrNone(query, queryValues);
- const runs = (result || []).map((row) => {
- let parsedSnapshot = row.snapshot;
- if (typeof parsedSnapshot === "string") {
- try {
- parsedSnapshot = JSON.parse(row.snapshot);
- } catch (e) {
- console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
- }
+ const queryValues = limit !== void 0 && offset !== void 0 ? [...values, limit, offset] : values;
+ const result = await this.db.manyOrNone(query, queryValues);
+ const runs = (result || []).map((row) => {
+ return this.parseWorkflowRun(row);
+ });
+ return { runs, total: total || runs.length };
+ } catch (error) {
+ console.error("Error getting workflow runs:", error);
+ throw error;
+ }
+ }
+ async getWorkflowRunById({
+ runId,
+ workflowName
+ }) {
+ try {
+ const conditions = [];
+ const values = [];
+ let paramIndex = 1;
+ if (runId) {
+ conditions.push(`run_id = $${paramIndex}`);
+ values.push(runId);
+ paramIndex++;
  }
- return {
- workflowName: row.workflow_name,
- runId: row.run_id,
- snapshot: parsedSnapshot,
- createdAt: row.createdAt,
- updatedAt: row.updatedAt
- };
- });
- return { runs, total: total || runs.length };
+ if (workflowName) {
+ conditions.push(`workflow_name = $${paramIndex}`);
+ values.push(workflowName);
+ paramIndex++;
+ }
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+ const query = `
+ SELECT * FROM ${this.getTableName(storage.TABLE_WORKFLOW_SNAPSHOT)}
+ ${whereClause}
+ `;
+ const queryValues = values;
+ const result = await this.db.oneOrNone(query, queryValues);
+ if (!result) {
+ return null;
+ }
+ return this.parseWorkflowRun(result);
+ } catch (error) {
+ console.error("Error getting workflow run by ID:", error);
+ throw error;
+ }
  }
  async close() {
  this.pgp.end();
package/dist/index.js CHANGED
@@ -1408,66 +1408,128 @@ var PostgresStore = class extends MastraStorage {
  throw error;
  }
  }
+ async hasColumn(table, column) {
+ const schema = this.schema || "public";
+ const result = await this.db.oneOrNone(
+ `SELECT 1 FROM information_schema.columns WHERE table_schema = $1 AND table_name = $2 AND (column_name = $3 OR column_name = $4)`,
+ [schema, table, column, column.toLowerCase()]
+ );
+ return !!result;
+ }
+ parseWorkflowRun(row) {
+ let parsedSnapshot = row.snapshot;
+ if (typeof parsedSnapshot === "string") {
+ try {
+ parsedSnapshot = JSON.parse(row.snapshot);
+ } catch (e) {
+ console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+ }
+ }
+ return {
+ workflowName: row.workflow_name,
+ runId: row.run_id,
+ snapshot: parsedSnapshot,
+ createdAt: row.createdAt,
+ updatedAt: row.updatedAt,
+ resourceId: row.resourceId
+ };
+ }
  async getWorkflowRuns({
  workflowName,
  fromDate,
  toDate,
  limit,
- offset
+ offset,
+ resourceId
  } = {}) {
- const conditions = [];
- const values = [];
- let paramIndex = 1;
- if (workflowName) {
- conditions.push(`workflow_name = $${paramIndex}`);
- values.push(workflowName);
- paramIndex++;
- }
- if (fromDate) {
- conditions.push(`"createdAt" >= $${paramIndex}`);
- values.push(fromDate);
- paramIndex++;
- }
- if (toDate) {
- conditions.push(`"createdAt" <= $${paramIndex}`);
- values.push(toDate);
- paramIndex++;
- }
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
- let total = 0;
- if (limit !== void 0 && offset !== void 0) {
- const countResult = await this.db.one(
- `SELECT COUNT(*) as count FROM ${this.getTableName(TABLE_WORKFLOW_SNAPSHOT)} ${whereClause}`,
- values
- );
- total = Number(countResult.count);
- }
- const query = `
+ try {
+ const conditions = [];
+ const values = [];
+ let paramIndex = 1;
+ if (workflowName) {
+ conditions.push(`workflow_name = $${paramIndex}`);
+ values.push(workflowName);
+ paramIndex++;
+ }
+ if (resourceId) {
+ const hasResourceId = await this.hasColumn(TABLE_WORKFLOW_SNAPSHOT, "resourceId");
+ if (hasResourceId) {
+ conditions.push(`"resourceId" = $${paramIndex}`);
+ values.push(resourceId);
+ paramIndex++;
+ } else {
+ console.warn(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
+ }
+ }
+ if (fromDate) {
+ conditions.push(`"createdAt" >= $${paramIndex}`);
+ values.push(fromDate);
+ paramIndex++;
+ }
+ if (toDate) {
+ conditions.push(`"createdAt" <= $${paramIndex}`);
+ values.push(toDate);
+ paramIndex++;
+ }
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+ let total = 0;
+ if (limit !== void 0 && offset !== void 0) {
+ const countResult = await this.db.one(
+ `SELECT COUNT(*) as count FROM ${this.getTableName(TABLE_WORKFLOW_SNAPSHOT)} ${whereClause}`,
+ values
+ );
+ total = Number(countResult.count);
+ }
+ const query = `
  SELECT * FROM ${this.getTableName(TABLE_WORKFLOW_SNAPSHOT)}
  ${whereClause}
  ORDER BY "createdAt" DESC
  ${limit !== void 0 && offset !== void 0 ? ` LIMIT $${paramIndex} OFFSET $${paramIndex + 1}` : ""}
  `;
- const queryValues = limit !== void 0 && offset !== void 0 ? [...values, limit, offset] : values;
- const result = await this.db.manyOrNone(query, queryValues);
- const runs = (result || []).map((row) => {
- let parsedSnapshot = row.snapshot;
- if (typeof parsedSnapshot === "string") {
- try {
- parsedSnapshot = JSON.parse(row.snapshot);
- } catch (e) {
- console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
- }
+ const queryValues = limit !== void 0 && offset !== void 0 ? [...values, limit, offset] : values;
+ const result = await this.db.manyOrNone(query, queryValues);
+ const runs = (result || []).map((row) => {
+ return this.parseWorkflowRun(row);
+ });
+ return { runs, total: total || runs.length };
+ } catch (error) {
+ console.error("Error getting workflow runs:", error);
+ throw error;
+ }
+ }
+ async getWorkflowRunById({
+ runId,
+ workflowName
+ }) {
+ try {
+ const conditions = [];
+ const values = [];
+ let paramIndex = 1;
+ if (runId) {
+ conditions.push(`run_id = $${paramIndex}`);
+ values.push(runId);
+ paramIndex++;
  }
- return {
- workflowName: row.workflow_name,
- runId: row.run_id,
- snapshot: parsedSnapshot,
- createdAt: row.createdAt,
- updatedAt: row.updatedAt
- };
- });
- return { runs, total: total || runs.length };
+ if (workflowName) {
+ conditions.push(`workflow_name = $${paramIndex}`);
+ values.push(workflowName);
+ paramIndex++;
+ }
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+ const query = `
+ SELECT * FROM ${this.getTableName(TABLE_WORKFLOW_SNAPSHOT)}
+ ${whereClause}
+ `;
+ const queryValues = values;
+ const result = await this.db.oneOrNone(query, queryValues);
+ if (!result) {
+ return null;
+ }
+ return this.parseWorkflowRun(result);
+ } catch (error) {
+ console.error("Error getting workflow run by ID:", error);
+ throw error;
+ }
  }
  async close() {
  this.pgp.end();
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@mastra/pg",
- "version": "0.3.1-alpha.3",
+ "version": "0.3.1-alpha.4",
  "description": "Postgres provider for Mastra - includes both vector and db storage capabilities",
  "type": "module",
  "main": "dist/index.js",
@@ -24,16 +24,16 @@
  "pg": "^8.13.3",
  "pg-promise": "^11.11.0",
  "xxhash-wasm": "^1.1.0",
- "@mastra/core": "^0.9.1-alpha.3"
+ "@mastra/core": "^0.9.1-alpha.4"
  },
  "devDependencies": {
- "@microsoft/api-extractor": "^7.52.1",
+ "@microsoft/api-extractor": "^7.52.5",
  "@types/node": "^20.17.27",
  "@types/pg": "^8.11.11",
  "eslint": "^9.23.0",
  "tsup": "^8.4.0",
  "typescript": "^5.8.2",
- "vitest": "^3.0.9",
+ "vitest": "^3.1.2",
  "@internal/lint": "0.0.2"
  },
  "scripts": {
@@ -4,7 +4,7 @@ import type { MessageType } from '@mastra/core/memory';
4
4
  import { TABLE_WORKFLOW_SNAPSHOT, TABLE_MESSAGES, TABLE_THREADS, TABLE_EVALS } from '@mastra/core/storage';
5
5
  import type { WorkflowRunState } from '@mastra/core/workflows';
6
6
  import pgPromise from 'pg-promise';
7
- import { describe, it, expect, beforeAll, beforeEach, afterAll, afterEach } from 'vitest';
7
+ import { describe, it, expect, beforeAll, beforeEach, afterAll, afterEach, vi } from 'vitest';
8
8
 
9
9
  import { PostgresStore } from '.';
10
10
  import type { PostgresConfig } from '.';
@@ -19,6 +19,8 @@ const TEST_CONFIG: PostgresConfig = {
19
19
 
20
20
  const connectionString = `postgresql://${TEST_CONFIG.user}:${TEST_CONFIG.password}@${TEST_CONFIG.host}:${TEST_CONFIG.port}/${TEST_CONFIG.database}`;
21
21
 
22
+ vi.setConfig({ testTimeout: 60_000, hookTimeout: 60_000 });
23
+
22
24
  // Sample test data factory functions
23
25
  const createSampleThread = () => ({
24
26
  id: `thread-${randomUUID()}`,
@@ -29,17 +31,20 @@ const createSampleThread = () => ({
29
31
  metadata: { key: 'value' },
30
32
  });
31
33
 
32
- const createSampleMessage = (threadId: string) =>
33
- ({
34
- id: `msg-${randomUUID()}`,
35
- role: 'user',
36
- type: 'text',
37
- threadId,
38
- content: [{ type: 'text', text: 'Hello' }],
39
- createdAt: new Date(),
40
- }) as any;
41
-
42
- const createSampleWorkflowSnapshot = (status: string, createdAt?: Date) => {
34
+ const createSampleMessage = (threadId: string): MessageType => ({
35
+ id: `msg-${randomUUID()}`,
36
+ resourceId: `resource-${randomUUID()}`,
37
+ role: 'user',
38
+ type: 'text',
39
+ threadId,
40
+ content: [{ type: 'text', text: 'Hello' }],
41
+ createdAt: new Date(),
42
+ });
43
+
44
+ const createSampleWorkflowSnapshot = (
45
+ status: WorkflowRunState['context']['steps'][string]['status'],
46
+ createdAt?: Date,
47
+ ) => {
43
48
  const runId = `run-${randomUUID()}`;
44
49
  const stepId = `step-${randomUUID()}`;
45
50
  const timestamp = createdAt || new Date();
@@ -58,9 +63,10 @@ const createSampleWorkflowSnapshot = (status: string, createdAt?: Date) => {
58
63
  attempts: {},
59
64
  },
60
65
  activePaths: [],
66
+ suspendedPaths: {},
61
67
  runId,
62
68
  timestamp: timestamp.getTime(),
63
- } as WorkflowRunState;
69
+ };
64
70
  return { snapshot, runId, stepId };
65
71
  };
66
72
 
@@ -82,6 +88,13 @@ const createSampleEval = (agentName: string, isTest = false) => {
82
88
  };
83
89
  };
84
90
 
91
+ const checkWorkflowSnapshot = (snapshot: WorkflowRunState | string, stepId: string, status: string) => {
92
+ if (typeof snapshot === 'string') {
93
+ throw new Error('Expected WorkflowRunState, got string');
94
+ }
95
+ expect(snapshot.context?.steps[stepId]?.status).toBe(status);
96
+ };
97
+
85
98
  describe('PostgresStore', () => {
86
99
  let store: PostgresStore;
87
100
 
@@ -233,7 +246,11 @@ describe('PostgresStore', () => {
233
246
  // Retrieve messages
234
247
  const retrievedMessages = await store.getMessages({ threadId: thread.id });
235
248
  expect(retrievedMessages).toHaveLength(2);
236
- expect(retrievedMessages).toEqual(expect.arrayContaining(messages));
249
+ const checkMessages = messages.map(m => {
250
+ const { resourceId, ...rest } = m;
251
+ return rest;
252
+ });
253
+ expect(retrievedMessages).toEqual(expect.arrayContaining(checkMessages));
237
254
  });
238
255
 
239
256
  it('should handle empty message array', async () => {
@@ -253,12 +270,13 @@ describe('PostgresStore', () => {
253
270
 
254
271
  await store.saveMessages({ messages });
255
272
 
256
- const retrievedMessages = await store.getMessages({ threadId: thread.id });
273
+ const retrievedMessages = await store.getMessages<MessageType>({ threadId: thread.id });
257
274
  expect(retrievedMessages).toHaveLength(3);
258
275
 
259
276
  // Verify order is maintained
260
277
  retrievedMessages.forEach((msg, idx) => {
261
- expect((msg.content[0] as any).text).toBe((messages[idx].content[0] as any).text);
278
+ // @ts-expect-error
279
+ expect(msg.content[0].text).toBe(messages[idx].content[0].text);
262
280
  });
263
281
  });
264
282
 
@@ -338,11 +356,17 @@ describe('PostgresStore', () => {
338
356
  const snapshot = {
339
357
  status: 'running',
340
358
  context: {
359
+ steps: {},
341
360
  stepResults: {},
342
361
  attempts: {},
343
362
  triggerData: { type: 'manual' },
344
363
  },
345
- } as any;
364
+ value: {},
365
+ activePaths: [],
366
+ suspendedPaths: {},
367
+ runId,
368
+ timestamp: new Date().getTime(),
369
+ };
346
370
 
347
371
  await store.persistWorkflowSnapshot({
348
372
  workflowName,
@@ -373,28 +397,40 @@ describe('PostgresStore', () => {
373
397
  const initialSnapshot = {
374
398
  status: 'running',
375
399
  context: {
400
+ steps: {},
376
401
  stepResults: {},
377
402
  attempts: {},
378
403
  triggerData: { type: 'manual' },
379
404
  },
405
+ value: {},
406
+ activePaths: [],
407
+ suspendedPaths: {},
408
+ runId,
409
+ timestamp: new Date().getTime(),
380
410
  };
381
411
 
382
412
  await store.persistWorkflowSnapshot({
383
413
  workflowName,
384
414
  runId,
385
- snapshot: initialSnapshot as any,
415
+ snapshot: initialSnapshot,
386
416
  });
387
417
 
388
418
  const updatedSnapshot = {
389
419
  status: 'completed',
390
420
  context: {
421
+ steps: {},
391
422
  stepResults: {
392
423
  'step-1': { status: 'success', result: { data: 'test' } },
393
424
  },
394
425
  attempts: { 'step-1': 1 },
395
426
  triggerData: { type: 'manual' },
396
427
  },
397
- } as any;
428
+ value: {},
429
+ activePaths: [],
430
+ suspendedPaths: {},
431
+ runId,
432
+ timestamp: new Date().getTime(),
433
+ };
398
434
 
399
435
  await store.persistWorkflowSnapshot({
400
436
  workflowName,
@@ -432,6 +468,7 @@ describe('PostgresStore', () => {
432
468
  dependencies: ['step-3', 'step-4'],
433
469
  },
434
470
  },
471
+ steps: {},
435
472
  attempts: { 'step-1': 1, 'step-2': 0 },
436
473
  triggerData: {
437
474
  type: 'scheduled',
@@ -453,6 +490,7 @@ describe('PostgresStore', () => {
453
490
  status: 'waiting',
454
491
  },
455
492
  ],
493
+ suspendedPaths: {},
456
494
  runId: runId,
457
495
  timestamp: Date.now(),
458
496
  };
@@ -460,7 +498,7 @@ describe('PostgresStore', () => {
460
498
  await store.persistWorkflowSnapshot({
461
499
  workflowName,
462
500
  runId,
463
- snapshot: complexSnapshot as unknown as WorkflowRunState,
501
+ snapshot: complexSnapshot,
464
502
  });
465
503
 
466
504
  const loadedSnapshot = await store.loadWorkflowSnapshot({
@@ -486,8 +524,8 @@ describe('PostgresStore', () => {
486
524
  const workflowName1 = 'default_test_1';
487
525
  const workflowName2 = 'default_test_2';
488
526
 
489
- const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('completed');
490
- const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('running');
527
+ const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
528
+ const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('failed');
491
529
 
492
530
  await store.persistWorkflowSnapshot({ workflowName: workflowName1, runId: runId1, snapshot: workflow1 });
493
531
  await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
@@ -498,17 +536,17 @@ describe('PostgresStore', () => {
498
536
  expect(total).toBe(2);
499
537
  expect(runs[0]!.workflowName).toBe(workflowName2); // Most recent first
500
538
  expect(runs[1]!.workflowName).toBe(workflowName1);
501
- const firstSnapshot = runs[0]!.snapshot as WorkflowRunState;
502
- const secondSnapshot = runs[1]!.snapshot as WorkflowRunState;
503
- expect(firstSnapshot.context?.steps[stepId2]?.status).toBe('running');
504
- expect(secondSnapshot.context?.steps[stepId1]?.status).toBe('completed');
539
+ const firstSnapshot = runs[0]!.snapshot;
540
+ const secondSnapshot = runs[1]!.snapshot;
541
+ checkWorkflowSnapshot(firstSnapshot, stepId2, 'failed');
542
+ checkWorkflowSnapshot(secondSnapshot, stepId1, 'success');
505
543
  });
506
544
 
507
545
  it('filters by workflow name', async () => {
508
546
  const workflowName1 = 'filter_test_1';
509
547
  const workflowName2 = 'filter_test_2';
510
548
 
511
- const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('completed');
549
+ const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
512
550
  const { snapshot: workflow2, runId: runId2 } = createSampleWorkflowSnapshot('failed');
513
551
 
514
552
  await store.persistWorkflowSnapshot({ workflowName: workflowName1, runId: runId1, snapshot: workflow1 });
@@ -519,8 +557,8 @@ describe('PostgresStore', () => {
519
557
  expect(runs).toHaveLength(1);
520
558
  expect(total).toBe(1);
521
559
  expect(runs[0]!.workflowName).toBe(workflowName1);
522
- const snapshot = runs[0]!.snapshot as WorkflowRunState;
523
- expect(snapshot.context?.steps[stepId1]?.status).toBe('completed');
560
+ const snapshot = runs[0]!.snapshot;
561
+ checkWorkflowSnapshot(snapshot, stepId1, 'success');
524
562
  });
525
563
 
526
564
  it('filters by date range', async () => {
@@ -531,9 +569,9 @@ describe('PostgresStore', () => {
531
569
  const workflowName2 = 'date_test_2';
532
570
  const workflowName3 = 'date_test_3';
533
571
 
534
- const { snapshot: workflow1, runId: runId1 } = createSampleWorkflowSnapshot('completed');
535
- const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('running');
536
- const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('waiting');
572
+ const { snapshot: workflow1, runId: runId1 } = createSampleWorkflowSnapshot('success');
573
+ const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('failed');
574
+ const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('suspended');
537
575
 
538
576
  await store.insert({
539
577
  tableName: TABLE_WORKFLOW_SNAPSHOT,
@@ -574,10 +612,10 @@ describe('PostgresStore', () => {
574
612
  expect(runs).toHaveLength(2);
575
613
  expect(runs[0]!.workflowName).toBe(workflowName3);
576
614
  expect(runs[1]!.workflowName).toBe(workflowName2);
577
- const firstSnapshot = runs[0]!.snapshot as WorkflowRunState;
578
- const secondSnapshot = runs[1]!.snapshot as WorkflowRunState;
579
- expect(firstSnapshot.context?.steps[stepId3]?.status).toBe('waiting');
580
- expect(secondSnapshot.context?.steps[stepId2]?.status).toBe('running');
615
+ const firstSnapshot = runs[0]!.snapshot;
616
+ const secondSnapshot = runs[1]!.snapshot;
617
+ checkWorkflowSnapshot(firstSnapshot, stepId3, 'suspended');
618
+ checkWorkflowSnapshot(secondSnapshot, stepId2, 'failed');
581
619
  });
582
620
 
583
621
  it('handles pagination', async () => {
@@ -585,9 +623,9 @@ describe('PostgresStore', () => {
585
623
  const workflowName2 = 'page_test_2';
586
624
  const workflowName3 = 'page_test_3';
587
625
 
588
- const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('completed');
589
- const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('running');
590
- const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('waiting');
626
+ const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
627
+ const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('failed');
628
+ const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('suspended');
591
629
 
592
630
  await store.persistWorkflowSnapshot({ workflowName: workflowName1, runId: runId1, snapshot: workflow1 });
593
631
  await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
@@ -601,18 +639,119 @@ describe('PostgresStore', () => {
601
639
  expect(page1.total).toBe(3); // Total count of all records
602
640
  expect(page1.runs[0]!.workflowName).toBe(workflowName3);
603
641
  expect(page1.runs[1]!.workflowName).toBe(workflowName2);
604
- const firstSnapshot = page1.runs[0]!.snapshot as WorkflowRunState;
605
- const secondSnapshot = page1.runs[1]!.snapshot as WorkflowRunState;
606
- expect(firstSnapshot.context?.steps[stepId3]?.status).toBe('waiting');
607
- expect(secondSnapshot.context?.steps[stepId2]?.status).toBe('running');
642
+ const firstSnapshot = page1.runs[0]!.snapshot;
643
+ const secondSnapshot = page1.runs[1]!.snapshot;
644
+ checkWorkflowSnapshot(firstSnapshot, stepId3, 'suspended');
645
+ checkWorkflowSnapshot(secondSnapshot, stepId2, 'failed');
608
646
 
609
647
  // Get second page
610
648
  const page2 = await store.getWorkflowRuns({ limit: 2, offset: 2 });
611
649
  expect(page2.runs).toHaveLength(1);
612
650
  expect(page2.total).toBe(3);
613
651
  expect(page2.runs[0]!.workflowName).toBe(workflowName1);
614
- const snapshot = page2.runs[0]!.snapshot as WorkflowRunState;
615
- expect(snapshot.context?.steps[stepId1]?.status).toBe('completed');
652
+ const snapshot = page2.runs[0]!.snapshot;
653
+ checkWorkflowSnapshot(snapshot, stepId1, 'success');
654
+ });
655
+ });
656
+
657
+ describe('getWorkflowRunById', () => {
658
+ const workflowName = 'workflow-id-test';
659
+ let runId: string;
660
+ let stepId: string;
661
+
662
+ beforeEach(async () => {
663
+ // Insert a workflow run for positive test
664
+ const sample = createSampleWorkflowSnapshot('success');
665
+ runId = sample.runId;
666
+ stepId = sample.stepId;
667
+ await store.insert({
668
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
669
+ record: {
670
+ workflow_name: workflowName,
671
+ run_id: runId,
672
+ resourceId: 'resource-abc',
673
+ snapshot: sample.snapshot,
674
+ createdAt: new Date(),
675
+ updatedAt: new Date(),
676
+ },
677
+ });
678
+ });
679
+
680
+ it('should retrieve a workflow run by ID', async () => {
681
+ const found = await store.getWorkflowRunById({
682
+ runId,
683
+ workflowName,
684
+ });
685
+ expect(found).not.toBeNull();
686
+ expect(found?.runId).toBe(runId);
687
+ checkWorkflowSnapshot(found?.snapshot!, stepId, 'success');
688
+ });
689
+
690
+ it('should return null for non-existent workflow run ID', async () => {
691
+ const notFound = await store.getWorkflowRunById({
692
+ runId: 'non-existent-id',
693
+ workflowName,
694
+ });
695
+ expect(notFound).toBeNull();
696
+ });
697
+ });
698
+ describe('getWorkflowRuns with resourceId', () => {
699
+ const workflowName = 'workflow-id-test';
700
+ let resourceId: string;
701
+ let runIds: string[] = [];
702
+
703
+ beforeEach(async () => {
704
+ // Insert multiple workflow runs for the same resourceId
705
+ resourceId = 'resource-shared';
706
+ for (const status of ['success', 'failed']) {
707
+ const sample = createSampleWorkflowSnapshot(status as WorkflowRunState['context']['steps'][string]['status']);
708
+ runIds.push(sample.runId);
709
+ await store.insert({
710
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
711
+ record: {
712
+ workflow_name: workflowName,
713
+ run_id: sample.runId,
714
+ resourceId,
715
+ snapshot: sample.snapshot,
716
+ createdAt: new Date(),
717
+ updatedAt: new Date(),
718
+ },
719
+ });
720
+ }
721
+ // Insert a run with a different resourceId
722
+ const other = createSampleWorkflowSnapshot('waiting');
723
+ await store.insert({
724
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
725
+ record: {
726
+ workflow_name: workflowName,
727
+ run_id: other.runId,
728
+ resourceId: 'resource-other',
729
+ snapshot: other.snapshot,
730
+ createdAt: new Date(),
731
+ updatedAt: new Date(),
732
+ },
733
+ });
734
+ });
735
+
736
+ it('should retrieve all workflow runs by resourceId', async () => {
737
+ const { runs } = await store.getWorkflowRuns({
738
+ resourceId,
739
+ workflowName,
740
+ });
741
+ expect(Array.isArray(runs)).toBe(true);
742
+ expect(runs.length).toBeGreaterThanOrEqual(2);
743
+ for (const run of runs) {
744
+ expect(run.resourceId).toBe(resourceId);
745
+ }
746
+ });
747
+
748
+ it('should return an empty array if no workflow runs match resourceId', async () => {
749
+ const { runs } = await store.getWorkflowRuns({
750
+ resourceId: 'non-existent-resource',
751
+ workflowName,
752
+ });
753
+ expect(Array.isArray(runs)).toBe(true);
754
+ expect(runs.length).toBe(0);
616
755
  });
617
756
  });
618
757
 
@@ -699,6 +838,38 @@ describe('PostgresStore', () => {
699
838
  });
700
839
  });
701
840
 
841
+ describe('hasColumn', () => {
842
+ const tempTable = 'temp_test_table';
843
+
844
+ beforeEach(async () => {
845
+ // Always try to drop the table before each test, ignore errors if it doesn't exist
846
+ try {
847
+ await store['db'].query(`DROP TABLE IF EXISTS ${tempTable}`);
848
+ } catch {
849
+ /* ignore */
850
+ }
851
+ });
852
+
853
+ it('returns true if the column exists', async () => {
854
+ await store['db'].query(`CREATE TABLE ${tempTable} (id SERIAL PRIMARY KEY, resourceId TEXT)`);
855
+ expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(true);
856
+ });
857
+
858
+ it('returns false if the column does not exist', async () => {
859
+ await store['db'].query(`CREATE TABLE ${tempTable} (id SERIAL PRIMARY KEY)`);
860
+ expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(false);
861
+ });
862
+
863
+ afterEach(async () => {
864
+ // Always try to drop the table after each test, ignore errors if it doesn't exist
865
+ try {
866
+ await store['db'].query(`DROP TABLE IF EXISTS ${tempTable}`);
867
+ } catch {
868
+ /* ignore */
869
+ }
870
+ });
871
+ });
872
+
702
873
  describe('Schema Support', () => {
703
874
  const customSchema = 'mastra_test';
704
875
  let customSchemaStore: PostgresStore;
@@ -8,7 +8,14 @@ import {
8
8
  TABLE_WORKFLOW_SNAPSHOT,
9
9
  TABLE_EVALS,
10
10
  } from '@mastra/core/storage';
11
- import type { EvalRow, StorageColumn, StorageGetMessagesArg, TABLE_NAMES } from '@mastra/core/storage';
11
+ import type {
12
+ EvalRow,
13
+ StorageColumn,
14
+ StorageGetMessagesArg,
15
+ TABLE_NAMES,
16
+ WorkflowRun,
17
+ WorkflowRuns,
18
+ } from '@mastra/core/storage';
12
19
  import type { WorkflowRunState } from '@mastra/core/workflows';
13
20
  import pgPromise from 'pg-promise';
14
21
  import type { ISSLConfig } from 'pg-promise/typescript/pg-subset';
@@ -561,7 +568,7 @@ export class PostgresStore extends MastraStorage {
561
568
  }
562
569
  }
563
570
 
564
- async getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T> {
571
+ async getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T[]> {
565
572
  try {
566
573
  const messages: any[] = [];
567
574
  const limit = typeof selectBy?.last === `number` ? selectBy.last : 40;
@@ -645,7 +652,7 @@ export class PostgresStore extends MastraStorage {
645
652
  }
646
653
  });
647
654
 
648
- return messages as T;
655
+ return messages as T[];
649
656
  } catch (error) {
650
657
  console.error('Error getting messages:', error);
651
658
  throw error;
@@ -748,96 +755,166 @@ export class PostgresStore extends MastraStorage {
748
755
  }
749
756
  }
750
757
 
758
+ private async hasColumn(table: string, column: string): Promise<boolean> {
759
+ // Use this.schema to scope the check
760
+ const schema = this.schema || 'public';
761
+ const result = await this.db.oneOrNone(
762
+ `SELECT 1 FROM information_schema.columns WHERE table_schema = $1 AND table_name = $2 AND (column_name = $3 OR column_name = $4)`,
763
+ [schema, table, column, column.toLowerCase()],
764
+ );
765
+ return !!result;
766
+ }
767
+
768
+ private parseWorkflowRun(row: any): WorkflowRun {
769
+ let parsedSnapshot: WorkflowRunState | string = row.snapshot as string;
770
+ if (typeof parsedSnapshot === 'string') {
771
+ try {
772
+ parsedSnapshot = JSON.parse(row.snapshot as string) as WorkflowRunState;
773
+ } catch (e) {
774
+ // If parsing fails, return the raw snapshot string
775
+ console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
776
+ }
777
+ }
778
+
779
+ return {
780
+ workflowName: row.workflow_name,
781
+ runId: row.run_id,
782
+ snapshot: parsedSnapshot,
783
+ createdAt: row.createdAt,
784
+ updatedAt: row.updatedAt,
785
+ resourceId: row.resourceId,
786
+ };
787
+ }
788
+
751
789
  async getWorkflowRuns({
752
790
  workflowName,
753
791
  fromDate,
754
792
  toDate,
755
793
  limit,
756
794
  offset,
795
+ resourceId,
757
796
  }: {
758
797
  workflowName?: string;
759
798
  fromDate?: Date;
760
799
  toDate?: Date;
761
800
  limit?: number;
762
801
  offset?: number;
763
- } = {}): Promise<{
764
- runs: Array<{
765
- workflowName: string;
766
- runId: string;
767
- snapshot: WorkflowRunState | string;
768
- createdAt: Date;
769
- updatedAt: Date;
770
- }>;
771
- total: number;
772
- }> {
773
- const conditions: string[] = [];
774
- const values: any[] = [];
775
- let paramIndex = 1;
776
-
777
- if (workflowName) {
778
- conditions.push(`workflow_name = $${paramIndex}`);
779
- values.push(workflowName);
780
- paramIndex++;
781
- }
782
-
783
- if (fromDate) {
784
- conditions.push(`"createdAt" >= $${paramIndex}`);
785
- values.push(fromDate);
786
- paramIndex++;
787
- }
802
+ resourceId?: string;
803
+ } = {}): Promise<WorkflowRuns> {
804
+ try {
805
+ const conditions: string[] = [];
806
+ const values: any[] = [];
807
+ let paramIndex = 1;
808
+
809
+ if (workflowName) {
810
+ conditions.push(`workflow_name = $${paramIndex}`);
811
+ values.push(workflowName);
812
+ paramIndex++;
813
+ }
788
814
 
789
- if (toDate) {
790
- conditions.push(`"createdAt" <= $${paramIndex}`);
791
- values.push(toDate);
792
- paramIndex++;
793
- }
815
+ if (resourceId) {
816
+ const hasResourceId = await this.hasColumn(TABLE_WORKFLOW_SNAPSHOT, 'resourceId');
817
+ if (hasResourceId) {
818
+ conditions.push(`"resourceId" = $${paramIndex}`);
819
+ values.push(resourceId);
820
+ paramIndex++;
821
+ } else {
822
+ console.warn(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
823
+ }
824
+ }
794
825
 
795
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
826
+ if (fromDate) {
827
+ conditions.push(`"createdAt" >= $${paramIndex}`);
828
+ values.push(fromDate);
829
+ paramIndex++;
830
+ }
796
831
 
797
- let total = 0;
798
- // Only get total count when using pagination
799
- if (limit !== undefined && offset !== undefined) {
800
- const countResult = await this.db.one(
801
- `SELECT COUNT(*) as count FROM ${this.getTableName(TABLE_WORKFLOW_SNAPSHOT)} ${whereClause}`,
802
- values,
803
- );
804
- total = Number(countResult.count);
805
- }
832
+ if (toDate) {
833
+ conditions.push(`"createdAt" <= $${paramIndex}`);
834
+ values.push(toDate);
835
+ paramIndex++;
836
+ }
837
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
838
+
839
+ let total = 0;
840
+ // Only get total count when using pagination
841
+ if (limit !== undefined && offset !== undefined) {
842
+ const countResult = await this.db.one(
843
+ `SELECT COUNT(*) as count FROM ${this.getTableName(TABLE_WORKFLOW_SNAPSHOT)} ${whereClause}`,
844
+ values,
845
+ );
846
+ total = Number(countResult.count);
847
+ }
806
848
 
807
- // Get results
808
- const query = `
849
+ // Get results
850
+ const query = `
809
851
  SELECT * FROM ${this.getTableName(TABLE_WORKFLOW_SNAPSHOT)}
810
852
  ${whereClause}
811
853
  ORDER BY "createdAt" DESC
812
854
  ${limit !== undefined && offset !== undefined ? ` LIMIT $${paramIndex} OFFSET $${paramIndex + 1}` : ''}
813
855
  `;
814
856
 
815
- const queryValues = limit !== undefined && offset !== undefined ? [...values, limit, offset] : values;
857
+ const queryValues = limit !== undefined && offset !== undefined ? [...values, limit, offset] : values;
816
858
 
817
- const result = await this.db.manyOrNone(query, queryValues);
859
+ const result = await this.db.manyOrNone(query, queryValues);
818
860
 
819
- const runs = (result || []).map(row => {
820
- let parsedSnapshot: WorkflowRunState | string = row.snapshot as string;
821
- if (typeof parsedSnapshot === 'string') {
822
- try {
823
- parsedSnapshot = JSON.parse(row.snapshot as string) as WorkflowRunState;
824
- } catch (e) {
825
- // If parsing fails, return the raw snapshot string
826
- console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
827
- }
861
+ const runs = (result || []).map(row => {
862
+ return this.parseWorkflowRun(row);
863
+ });
864
+
865
+ // Use runs.length as total when not paginating
866
+ return { runs, total: total || runs.length };
867
+ } catch (error) {
868
+ console.error('Error getting workflow runs:', error);
869
+ throw error;
870
+ }
871
+ }
872
+
873
+ async getWorkflowRunById({
874
+ runId,
875
+ workflowName,
876
+ }: {
877
+ runId: string;
878
+ workflowName?: string;
879
+ }): Promise<WorkflowRun | null> {
880
+ try {
881
+ const conditions: string[] = [];
882
+ const values: any[] = [];
883
+ let paramIndex = 1;
884
+
885
+ if (runId) {
886
+ conditions.push(`run_id = $${paramIndex}`);
887
+ values.push(runId);
888
+ paramIndex++;
828
889
  }
829
890
 
830
- return {
831
- workflowName: row.workflow_name,
832
- runId: row.run_id,
833
- snapshot: parsedSnapshot,
834
- createdAt: row.createdAt,
835
- updatedAt: row.updatedAt,
836
- };
837
- });
891
+ if (workflowName) {
892
+ conditions.push(`workflow_name = $${paramIndex}`);
893
+ values.push(workflowName);
894
+ paramIndex++;
895
+ }
896
+
897
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
898
+
899
+ // Get results
900
+ const query = `
901
+ SELECT * FROM ${this.getTableName(TABLE_WORKFLOW_SNAPSHOT)}
902
+ ${whereClause}
903
+ `;
904
+
905
+ const queryValues = values;
838
906
 
839
- // Use runs.length as total when not paginating
840
- return { runs, total: total || runs.length };
907
+ const result = await this.db.oneOrNone(query, queryValues);
908
+
909
+ if (!result) {
910
+ return null;
911
+ }
912
+
913
+ return this.parseWorkflowRun(result);
914
+ } catch (error) {
915
+ console.error('Error getting workflow run by ID:', error);
916
+ throw error;
917
+ }
841
918
  }
842
919
 
843
920
  async close(): Promise<void> {