@pantheon.ai/agents 0.3.0 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md
CHANGED
|
@@ -49,6 +49,28 @@ To avoid running it twice, apply the rest of the file to `pantheon_agents`:
|
|
|
49
49
|
sed '1d' src/db/schema/tidb.sql | mysql --host 127.0.0.1 --port 4000 -u root pantheon_agents
|
|
50
50
|
```
|
|
51
51
|
|
|
52
|
+
## Versioned Migrations
|
|
53
|
+
|
|
54
|
+
When schema changes across versions, add versioned migration SQL files under:
|
|
55
|
+
|
|
56
|
+
- `src/db/migrations/tidb`
|
|
57
|
+
- `src/db/migrations/db9`
|
|
58
|
+
|
|
59
|
+
Example migration added in this version:
|
|
60
|
+
|
|
61
|
+
- `src/db/migrations/tidb/20260304_0001_add_task_retry_columns.sql`
|
|
62
|
+
- `src/db/migrations/db9/20260304_0001_add_task_retry_columns.sql`
|
|
63
|
+
|
|
64
|
+
Apply manually:
|
|
65
|
+
|
|
66
|
+
```bash
|
|
67
|
+
# TiDB/MySQL
|
|
68
|
+
mysql --host 127.0.0.1 --port 4000 -u root pantheon_agents < src/db/migrations/tidb/20260304_0001_add_task_retry_columns.sql
|
|
69
|
+
|
|
70
|
+
# db9/PostgreSQL
|
|
71
|
+
psql "$DATABASE_URL" -f src/db/migrations/db9/20260304_0001_add_task_retry_columns.sql
|
|
72
|
+
```
|
|
73
|
+
|
|
52
74
|
## Developer Local Setup
|
|
53
75
|
|
|
54
76
|
This section is only for developing this package locally.
|
|
@@ -136,6 +158,9 @@ pantheon-agents show-config <agent-name>
|
|
|
136
158
|
pantheon-agents show-tasks --all
|
|
137
159
|
pantheon-agents get-task <agent-name> <task-id>
|
|
138
160
|
pantheon-agents cancel-task <agent-name> <task-id> [reason] --yes
|
|
161
|
+
pantheon-agents retry-task <agent-name> <task-id> [reason] --yes
|
|
162
|
+
pantheon-agents skill.sh
|
|
163
|
+
pantheon-agents gen-migration-sql --provider tidb --from 0.3.0
|
|
139
164
|
pantheon-agents delete-task <agent-name> <task-id>
|
|
140
165
|
```
|
|
141
166
|
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
-- introduced_in: 0.3.1
|
|
2
|
+
-- Add retry metadata and policy columns for db9/PostgreSQL.
|
|
3
|
+
|
|
4
|
+
ALTER TABLE task
|
|
5
|
+
ADD COLUMN IF NOT EXISTS attempt_count INT NOT NULL DEFAULT 1;
|
|
6
|
+
|
|
7
|
+
ALTER TABLE agent_project_config
|
|
8
|
+
ADD COLUMN IF NOT EXISTS max_retry_attempts INT NOT NULL DEFAULT 3;
|
|
9
|
+
|
|
10
|
+
ALTER TABLE agent_project_config
|
|
11
|
+
ADD COLUMN IF NOT EXISTS retry_backoff_seconds INT NOT NULL DEFAULT 30;
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
-- introduced_in: 0.3.1
|
|
2
|
+
-- Add retry metadata and policy columns for TiDB/MySQL.
|
|
3
|
+
|
|
4
|
+
ALTER TABLE task
|
|
5
|
+
ADD COLUMN IF NOT EXISTS attempt_count INT NOT NULL DEFAULT 1;
|
|
6
|
+
|
|
7
|
+
ALTER TABLE agent_project_config
|
|
8
|
+
ADD COLUMN IF NOT EXISTS max_retry_attempts INT NOT NULL DEFAULT 3,
|
|
9
|
+
ADD COLUMN IF NOT EXISTS retry_backoff_seconds INT NOT NULL DEFAULT 30;
|
package/dist/index.js
CHANGED
|
@@ -2,7 +2,8 @@
|
|
|
2
2
|
import { createRequire } from "node:module";
|
|
3
3
|
import { Command, InvalidArgumentError, createCommand } from "commander";
|
|
4
4
|
import process$1 from "node:process";
|
|
5
|
-
import * as fs from "node:fs";
|
|
5
|
+
import * as fs$1 from "node:fs";
|
|
6
|
+
import fs from "node:fs";
|
|
6
7
|
import path from "node:path";
|
|
7
8
|
import { multistream, pino, transport } from "pino";
|
|
8
9
|
import { Kysely, MysqlDialect, PostgresDialect } from "kysely";
|
|
@@ -11,6 +12,7 @@ import z$1, { z } from "zod";
|
|
|
11
12
|
import { createPool } from "mysql2";
|
|
12
13
|
import readline from "node:readline/promises";
|
|
13
14
|
import expandTilde from "expand-tilde";
|
|
15
|
+
import { fileURLToPath } from "node:url";
|
|
14
16
|
import blessed from "reblessed";
|
|
15
17
|
import { inspect } from "node:util";
|
|
16
18
|
import { parse } from "shell-quote";
|
|
@@ -85,7 +87,7 @@ var require_package = /* @__PURE__ */ __commonJSMin(((exports, module) => {
|
|
|
85
87
|
//#endregion
|
|
86
88
|
//#region ../../node_modules/dotenv/lib/main.js
|
|
87
89
|
var require_main = /* @__PURE__ */ __commonJSMin(((exports, module) => {
|
|
88
|
-
const fs$
|
|
90
|
+
const fs$2 = __require("fs");
|
|
89
91
|
const path$1 = __require("path");
|
|
90
92
|
const os = __require("os");
|
|
91
93
|
const crypto = __require("crypto");
|
|
@@ -222,10 +224,10 @@ var require_main = /* @__PURE__ */ __commonJSMin(((exports, module) => {
|
|
|
222
224
|
function _vaultPath(options) {
|
|
223
225
|
let possibleVaultPath = null;
|
|
224
226
|
if (options && options.path && options.path.length > 0) if (Array.isArray(options.path)) {
|
|
225
|
-
for (const filepath of options.path) if (fs$
|
|
227
|
+
for (const filepath of options.path) if (fs$2.existsSync(filepath)) possibleVaultPath = filepath.endsWith(".vault") ? filepath : `${filepath}.vault`;
|
|
226
228
|
} else possibleVaultPath = options.path.endsWith(".vault") ? options.path : `${options.path}.vault`;
|
|
227
229
|
else possibleVaultPath = path$1.resolve(process.cwd(), ".env.vault");
|
|
228
|
-
if (fs$
|
|
230
|
+
if (fs$2.existsSync(possibleVaultPath)) return possibleVaultPath;
|
|
229
231
|
return null;
|
|
230
232
|
}
|
|
231
233
|
function _resolveHome(envPath) {
|
|
@@ -259,7 +261,7 @@ var require_main = /* @__PURE__ */ __commonJSMin(((exports, module) => {
|
|
|
259
261
|
let lastError;
|
|
260
262
|
const parsedAll = {};
|
|
261
263
|
for (const path of optionPaths) try {
|
|
262
|
-
const parsed = DotenvModule.parse(fs$
|
|
264
|
+
const parsed = DotenvModule.parse(fs$2.readFileSync(path, { encoding }));
|
|
263
265
|
DotenvModule.populate(parsedAll, parsed, options);
|
|
264
266
|
} catch (e) {
|
|
265
267
|
if (debug) _debug(`Failed to load ${path} ${e.message}`);
|
|
@@ -397,7 +399,7 @@ var require_cli_options = /* @__PURE__ */ __commonJSMin(((exports, module) => {
|
|
|
397
399
|
|
|
398
400
|
//#endregion
|
|
399
401
|
//#region package.json
|
|
400
|
-
var version = "0.
|
|
402
|
+
var version = "0.3.0";
|
|
401
403
|
|
|
402
404
|
//#endregion
|
|
403
405
|
//#region src/db/db9.ts
|
|
@@ -427,8 +429,10 @@ const taskCommonSchema = z.object({
|
|
|
427
429
|
base_branch_id: z.uuid(),
|
|
428
430
|
parent_task_id: z.string().nullable().optional(),
|
|
429
431
|
config_version: z.number().int().positive().nullable().default(null),
|
|
432
|
+
attempt_count: z.coerce.number().int().positive().optional(),
|
|
430
433
|
cancel_reason: z.string().nullable().default(null),
|
|
431
|
-
queued_at: z.coerce.date()
|
|
434
|
+
queued_at: z.coerce.date(),
|
|
435
|
+
error: z.string().nullable().optional()
|
|
432
436
|
});
|
|
433
437
|
const taskItemSchema = z.discriminatedUnion("status", [
|
|
434
438
|
taskCommonSchema.extend({ status: z.literal("pending") }),
|
|
@@ -510,6 +514,7 @@ var TaskListProvider = class {
|
|
|
510
514
|
base_branch_id: params.base_branch_id,
|
|
511
515
|
parent_task_id: parentTaskId,
|
|
512
516
|
config_version: params.type === "reconfig" || params.type === "bootstrap" ? params.configVersion : null,
|
|
517
|
+
attempt_count: 1,
|
|
513
518
|
cancel_reason: null,
|
|
514
519
|
queued_at: /* @__PURE__ */ new Date()
|
|
515
520
|
});
|
|
@@ -630,6 +635,8 @@ var TaskListDb9Provider = class extends TaskListProvider {
|
|
|
630
635
|
"agent_project_config.config_version",
|
|
631
636
|
"agent_project_config.config_task_id",
|
|
632
637
|
"agent_project_config.concurrency",
|
|
638
|
+
"agent_project_config.max_retry_attempts",
|
|
639
|
+
"agent_project_config.retry_backoff_seconds",
|
|
633
640
|
"agent_project_config.role",
|
|
634
641
|
"agent_project_config.skills",
|
|
635
642
|
"agent_project_config.prototype_url",
|
|
@@ -646,6 +653,8 @@ var TaskListDb9Provider = class extends TaskListProvider {
|
|
|
646
653
|
"agent_project_config.config_version",
|
|
647
654
|
"agent_project_config.config_task_id",
|
|
648
655
|
"agent_project_config.concurrency",
|
|
656
|
+
"agent_project_config.max_retry_attempts",
|
|
657
|
+
"agent_project_config.retry_backoff_seconds",
|
|
649
658
|
"agent_project_config.role",
|
|
650
659
|
"agent_project_config.skills",
|
|
651
660
|
"agent_project_config.prototype_url",
|
|
@@ -653,16 +662,24 @@ var TaskListDb9Provider = class extends TaskListProvider {
|
|
|
653
662
|
]).select((eb) => eb.ref("task.status").$castTo().as("config_task_status")).where("agent_project_config.agent", "=", this.agentName).execute();
|
|
654
663
|
}
|
|
655
664
|
async setAgentConfig({ skills, ...config }) {
|
|
665
|
+
const maxRetryAttempts = config.max_retry_attempts ?? 3;
|
|
666
|
+
const retryBackoffSeconds = config.retry_backoff_seconds ?? 30;
|
|
656
667
|
await this.db.insertInto("agent_project_config").values({
|
|
657
668
|
agent: this.agentName,
|
|
658
669
|
skills: JSON.stringify(skills),
|
|
659
|
-
...config
|
|
670
|
+
...config,
|
|
671
|
+
max_retry_attempts: maxRetryAttempts,
|
|
672
|
+
retry_backoff_seconds: retryBackoffSeconds
|
|
660
673
|
}).execute();
|
|
661
674
|
}
|
|
662
675
|
async updateAgentConfig({ skills, ...config }) {
|
|
676
|
+
const maxRetryAttempts = config.max_retry_attempts;
|
|
677
|
+
const retryBackoffSeconds = config.retry_backoff_seconds;
|
|
663
678
|
const result = await this.db.updateTable("agent_project_config").set({
|
|
664
679
|
skills: JSON.stringify(skills),
|
|
665
|
-
...config
|
|
680
|
+
...config,
|
|
681
|
+
...maxRetryAttempts == null ? {} : { max_retry_attempts: maxRetryAttempts },
|
|
682
|
+
...retryBackoffSeconds == null ? {} : { retry_backoff_seconds: retryBackoffSeconds }
|
|
666
683
|
}).where("agent", "=", this.agentName).where("project_id", "=", config.project_id).executeTakeFirst();
|
|
667
684
|
if (Number(result.numUpdatedRows ?? 0) === 0) throw new Error(`No config found to update for agent ${this.agentName} and project ${config.project_id}.`);
|
|
668
685
|
}
|
|
@@ -704,13 +721,15 @@ var TaskListDb9Provider = class extends TaskListProvider {
|
|
|
704
721
|
}
|
|
705
722
|
async insertTask(taskItem) {
|
|
706
723
|
const { started_at, ended_at, queued_at, cancelled_at, ...rest } = taskItem;
|
|
724
|
+
const attemptCount = taskItem.attempt_count ?? 1;
|
|
707
725
|
const inserted = await this.db.insertInto("task").values({
|
|
708
726
|
agent: this.agentName,
|
|
709
727
|
started_at,
|
|
710
728
|
ended_at,
|
|
711
729
|
queued_at,
|
|
712
730
|
cancelled_at,
|
|
713
|
-
...rest
|
|
731
|
+
...rest,
|
|
732
|
+
attempt_count: attemptCount
|
|
714
733
|
}).returningAll().executeTakeFirstOrThrow();
|
|
715
734
|
return taskItemSchema.parse(inserted);
|
|
716
735
|
}
|
|
@@ -765,16 +784,24 @@ var TaskListTidbProvider = class extends TaskListProvider {
|
|
|
765
784
|
return await this.db.selectFrom("agent_project_config").innerJoin("task", "task.id", "agent_project_config.config_task_id").selectAll("agent_project_config").select((eb) => eb.ref("task.status").$castTo().as("config_task_status")).where("agent_project_config.agent", "=", this.agentName).execute();
|
|
766
785
|
}
|
|
767
786
|
async setAgentConfig({ skills, ...config }) {
|
|
787
|
+
const maxRetryAttempts = config.max_retry_attempts ?? 3;
|
|
788
|
+
const retryBackoffSeconds = config.retry_backoff_seconds ?? 30;
|
|
768
789
|
await this.db.insertInto("agent_project_config").values({
|
|
769
790
|
agent: this.agentName,
|
|
770
791
|
skills: JSON.stringify(skills),
|
|
771
|
-
...config
|
|
792
|
+
...config,
|
|
793
|
+
max_retry_attempts: maxRetryAttempts,
|
|
794
|
+
retry_backoff_seconds: retryBackoffSeconds
|
|
772
795
|
}).execute();
|
|
773
796
|
}
|
|
774
797
|
async updateAgentConfig({ skills, ...config }) {
|
|
798
|
+
const maxRetryAttempts = config.max_retry_attempts;
|
|
799
|
+
const retryBackoffSeconds = config.retry_backoff_seconds;
|
|
775
800
|
const result = await this.db.updateTable("agent_project_config").set({
|
|
776
801
|
skills: JSON.stringify(skills),
|
|
777
|
-
...config
|
|
802
|
+
...config,
|
|
803
|
+
...maxRetryAttempts == null ? {} : { max_retry_attempts: maxRetryAttempts },
|
|
804
|
+
...retryBackoffSeconds == null ? {} : { retry_backoff_seconds: retryBackoffSeconds }
|
|
778
805
|
}).where("agent", "=", this.agentName).where("project_id", "=", config.project_id).executeTakeFirst();
|
|
779
806
|
if (Number(result.numUpdatedRows ?? 0) === 0) throw new Error(`No config found to update for agent ${this.agentName} and project ${config.project_id}.`);
|
|
780
807
|
}
|
|
@@ -816,13 +843,15 @@ var TaskListTidbProvider = class extends TaskListProvider {
|
|
|
816
843
|
}
|
|
817
844
|
async insertTask(taskItem) {
|
|
818
845
|
const { started_at, ended_at, queued_at, cancelled_at, ...rest } = taskItem;
|
|
846
|
+
const attemptCount = taskItem.attempt_count ?? 1;
|
|
819
847
|
const { insertId } = await this.db.insertInto("task").values({
|
|
820
848
|
agent: this.agentName,
|
|
821
849
|
started_at,
|
|
822
850
|
ended_at,
|
|
823
851
|
queued_at,
|
|
824
852
|
cancelled_at,
|
|
825
|
-
...rest
|
|
853
|
+
...rest,
|
|
854
|
+
attempt_count: attemptCount
|
|
826
855
|
}).executeTakeFirstOrThrow();
|
|
827
856
|
return {
|
|
828
857
|
...taskItem,
|
|
@@ -1171,7 +1200,7 @@ const branchExecutionResultSchema = z.object({
|
|
|
1171
1200
|
status: z.string(),
|
|
1172
1201
|
status_text: z.string(),
|
|
1173
1202
|
branch_id: z.string(),
|
|
1174
|
-
snap_id: z.string(),
|
|
1203
|
+
snap_id: z.string().nullable(),
|
|
1175
1204
|
background_task_id: z.string().nullable(),
|
|
1176
1205
|
started_at: zodJsonDate,
|
|
1177
1206
|
last_polled_at: zodJsonDate,
|
|
@@ -1248,7 +1277,7 @@ const explorationResultSchema = z.object({
|
|
|
1248
1277
|
status: z.string(),
|
|
1249
1278
|
status_text: z.string().nullable(),
|
|
1250
1279
|
branch_id: z.string(),
|
|
1251
|
-
snap_id: z.string(),
|
|
1280
|
+
snap_id: z.string().nullable(),
|
|
1252
1281
|
background_task_id: z.string().nullable(),
|
|
1253
1282
|
started_at: zodJsonDate,
|
|
1254
1283
|
last_polled_at: zodJsonDate,
|
|
@@ -2057,7 +2086,65 @@ async function executeOnPantheon({ projectId, branchId, prompt, agent }) {
|
|
|
2057
2086
|
//#endregion
|
|
2058
2087
|
//#region src/core/task-list.ts
|
|
2059
2088
|
const DEFAULT_CONCURRENCY = 1;
|
|
2089
|
+
const DEFAULT_MAX_RETRY_ATTEMPTS$1 = 3;
|
|
2090
|
+
const DEFAULT_RETRY_BACKOFF_SECONDS$1 = 30;
|
|
2060
2091
|
const AUTO_RECONFIG_PROMPT = "Config outdated. Reconfigure this branch against the latest agent config.";
|
|
2092
|
+
function getTaskAttemptCount(task) {
|
|
2093
|
+
if (Number.isInteger(task.attempt_count) && task.attempt_count > 0) return task.attempt_count;
|
|
2094
|
+
return 1;
|
|
2095
|
+
}
|
|
2096
|
+
function getMaxRetryAttempts(config, logger) {
|
|
2097
|
+
const configured = config.max_retry_attempts;
|
|
2098
|
+
if (configured == null) return DEFAULT_MAX_RETRY_ATTEMPTS$1;
|
|
2099
|
+
if (Number.isInteger(configured) && configured >= 0) return configured;
|
|
2100
|
+
logger.warn("Invalid max_retry_attempts=%o for project %s; fallback to %d.", configured, config.project_id, DEFAULT_MAX_RETRY_ATTEMPTS$1);
|
|
2101
|
+
return DEFAULT_MAX_RETRY_ATTEMPTS$1;
|
|
2102
|
+
}
|
|
2103
|
+
function getRetryBackoffSeconds(config, logger) {
|
|
2104
|
+
const configured = config.retry_backoff_seconds;
|
|
2105
|
+
if (configured == null) return DEFAULT_RETRY_BACKOFF_SECONDS$1;
|
|
2106
|
+
if (Number.isInteger(configured) && configured > 0) return configured;
|
|
2107
|
+
logger.warn("Invalid retry_backoff_seconds=%o for project %s; fallback to %d.", configured, config.project_id, DEFAULT_RETRY_BACKOFF_SECONDS$1);
|
|
2108
|
+
return DEFAULT_RETRY_BACKOFF_SECONDS$1;
|
|
2109
|
+
}
|
|
2110
|
+
function getBackoffDelaySeconds(backoffBaseSeconds, nextAttemptCount) {
|
|
2111
|
+
return backoffBaseSeconds * 2 ** (Math.max(1, nextAttemptCount - 1) - 1);
|
|
2112
|
+
}
|
|
2113
|
+
async function retryFailedTaskInPlace(provider, task, logger, options = {}) {
|
|
2114
|
+
const config = await provider.getAgentConfig(task.project_id);
|
|
2115
|
+
if (!config) {
|
|
2116
|
+
logger.warn("Skip retry for task %s because project config %s is missing.", task.id, task.project_id);
|
|
2117
|
+
return null;
|
|
2118
|
+
}
|
|
2119
|
+
const currentAttemptCount = getTaskAttemptCount(task);
|
|
2120
|
+
const retriesUsed = Math.max(0, currentAttemptCount - 1);
|
|
2121
|
+
const maxRetryAttempts = getMaxRetryAttempts(config, logger);
|
|
2122
|
+
if (!options.ignoreRetryLimit && retriesUsed >= maxRetryAttempts) {
|
|
2123
|
+
logger.info("Task %s reached max retries (%d). Keep failed status.", task.id, maxRetryAttempts);
|
|
2124
|
+
return null;
|
|
2125
|
+
}
|
|
2126
|
+
const nextAttemptCount = currentAttemptCount + 1;
|
|
2127
|
+
const now = Date.now();
|
|
2128
|
+
const delaySeconds = options.immediate ? 0 : getBackoffDelaySeconds(getRetryBackoffSeconds(config, logger), nextAttemptCount);
|
|
2129
|
+
const queuedAt = new Date(now + delaySeconds * 1e3);
|
|
2130
|
+
const retriedTask = {
|
|
2131
|
+
status: "pending",
|
|
2132
|
+
id: task.id,
|
|
2133
|
+
task: task.task,
|
|
2134
|
+
type: task.type,
|
|
2135
|
+
project_id: task.project_id,
|
|
2136
|
+
base_branch_id: task.base_branch_id,
|
|
2137
|
+
parent_task_id: task.parent_task_id ?? null,
|
|
2138
|
+
config_version: task.config_version ?? null,
|
|
2139
|
+
attempt_count: nextAttemptCount,
|
|
2140
|
+
cancel_reason: task.cancel_reason ?? null,
|
|
2141
|
+
queued_at: queuedAt,
|
|
2142
|
+
error: task.error
|
|
2143
|
+
};
|
|
2144
|
+
await provider.updateTask(retriedTask);
|
|
2145
|
+
logger.info("Requeued failed task %s for attempt %d at %s%s.", task.id, nextAttemptCount, queuedAt.toISOString(), options.reason ? ` (reason: ${options.reason})` : "");
|
|
2146
|
+
return retriedTask;
|
|
2147
|
+
}
|
|
2061
2148
|
function buildTaskPromptSequence(config, taskPrompt, taskType = "default") {
|
|
2062
2149
|
if (taskType === "bootstrap" && taskPrompt === "$setup") return [buildConfigSetupStep({
|
|
2063
2150
|
prototypeRepoUrl: config.prototype_url,
|
|
@@ -2144,6 +2231,8 @@ async function pollRunningTaskState(provider, state, logger) {
|
|
|
2144
2231
|
} else if (newStatus.state === "failed") {
|
|
2145
2232
|
logger.info(`Task failed on Branch[id = ${newStatus.branch.id},snap_id=${newStatus.branch.latest_snap_id}] with error: %s`, newStatus.error);
|
|
2146
2233
|
await provider.failTask(state, newStatus.error);
|
|
2234
|
+
const failedTask = await provider.getTask(state.id);
|
|
2235
|
+
if (failedTask?.status === "failed") await retryFailedTaskInPlace(provider, failedTask, logger.child({ name: `task:${state.id}:auto-retry` }));
|
|
2147
2236
|
}
|
|
2148
2237
|
}
|
|
2149
2238
|
function isTaskOutdated(task, config) {
|
|
@@ -2268,6 +2357,7 @@ async function startPendingTasksUpToConcurrency(provider, pendingTasks, runningT
|
|
|
2268
2357
|
return config;
|
|
2269
2358
|
};
|
|
2270
2359
|
for (const task of pendingTasks) {
|
|
2360
|
+
if (task.queued_at.getTime() > Date.now()) continue;
|
|
2271
2361
|
const config = await getProjectConfig(task.project_id);
|
|
2272
2362
|
if (!config) {
|
|
2273
2363
|
if (!missingConfigProjects.has(task.project_id)) {
|
|
@@ -2284,6 +2374,8 @@ async function startPendingTasksUpToConcurrency(provider, pendingTasks, runningT
|
|
|
2284
2374
|
runningCountByProject.set(task.project_id, runningCount + 1);
|
|
2285
2375
|
} catch (e) {
|
|
2286
2376
|
logger.error(`Failed to start task ${task.id}: ${getErrorMessage(e)}`);
|
|
2377
|
+
const failedTask = await provider.getTask(task.id);
|
|
2378
|
+
if (failedTask?.status === "failed") await retryFailedTaskInPlace(provider, failedTask, logger.child({ name: `task:${task.id}:auto-retry` }));
|
|
2287
2379
|
}
|
|
2288
2380
|
}
|
|
2289
2381
|
}
|
|
@@ -2713,10 +2805,12 @@ var WatchStepAggregator = class {
|
|
|
2713
2805
|
|
|
2714
2806
|
//#endregion
|
|
2715
2807
|
//#region src/core/index.ts
|
|
2808
|
+
const DEFAULT_MAX_RETRY_ATTEMPTS = 3;
|
|
2809
|
+
const DEFAULT_RETRY_BACKOFF_SECONDS = 30;
|
|
2716
2810
|
async function runAgent(name, options, logger) {
|
|
2717
2811
|
const agentDir = path.join(options.dataDir, "agents", name);
|
|
2718
2812
|
const pidFile = path.join(agentDir, "pid");
|
|
2719
|
-
await fs.promises.mkdir(agentDir, { recursive: true });
|
|
2813
|
+
await fs$1.promises.mkdir(agentDir, { recursive: true });
|
|
2720
2814
|
await assertsSingleton(logger, pidFile);
|
|
2721
2815
|
await startTaskListLoop(name, { loopInterval: options.loopInterval }, logger);
|
|
2722
2816
|
}
|
|
@@ -2749,6 +2843,18 @@ async function configAgent(name, options) {
|
|
|
2749
2843
|
process.exitCode = 1;
|
|
2750
2844
|
return;
|
|
2751
2845
|
}
|
|
2846
|
+
const maxRetryAttempts = options.maxRetryAttempts ?? previousConfig?.max_retry_attempts ?? DEFAULT_MAX_RETRY_ATTEMPTS;
|
|
2847
|
+
if (!Number.isInteger(maxRetryAttempts) || maxRetryAttempts < 0) {
|
|
2848
|
+
console.error("--max-retry-attempts must be a non-negative integer.");
|
|
2849
|
+
process.exitCode = 1;
|
|
2850
|
+
return;
|
|
2851
|
+
}
|
|
2852
|
+
const retryBackoffSeconds = options.retryBackoffSeconds ?? previousConfig?.retry_backoff_seconds ?? DEFAULT_RETRY_BACKOFF_SECONDS;
|
|
2853
|
+
if (!Number.isInteger(retryBackoffSeconds) || retryBackoffSeconds <= 0) {
|
|
2854
|
+
console.error("--retry-backoff-seconds must be a positive integer.");
|
|
2855
|
+
process.exitCode = 1;
|
|
2856
|
+
return;
|
|
2857
|
+
}
|
|
2752
2858
|
const resolvedSkills = options.skills ?? normalizeSkills(previousConfig?.skills);
|
|
2753
2859
|
const resolvedExecuteAgent = options.executeAgent.trim() || previousConfig?.execute_agent || "codex";
|
|
2754
2860
|
const resolvedPrototypeUrl = options.prototypeUrl.trim() || previousConfig?.prototype_url || "https://github.com/pingcap-inc/pantheon-agents";
|
|
@@ -2779,6 +2885,8 @@ async function configAgent(name, options) {
|
|
|
2779
2885
|
config_task_id: configTaskId,
|
|
2780
2886
|
config_version: previousConfig.config_version + (options.prompt ? 2 : 1),
|
|
2781
2887
|
concurrency,
|
|
2888
|
+
max_retry_attempts: maxRetryAttempts,
|
|
2889
|
+
retry_backoff_seconds: retryBackoffSeconds,
|
|
2782
2890
|
execute_agent: resolvedExecuteAgent,
|
|
2783
2891
|
role: resolvedRole ?? previousConfig.role,
|
|
2784
2892
|
skills: resolvedSkills,
|
|
@@ -2846,6 +2954,8 @@ async function configAgent(name, options) {
|
|
|
2846
2954
|
config_task_id: configTaskId,
|
|
2847
2955
|
config_version: configVersion,
|
|
2848
2956
|
concurrency,
|
|
2957
|
+
max_retry_attempts: maxRetryAttempts,
|
|
2958
|
+
retry_backoff_seconds: retryBackoffSeconds,
|
|
2849
2959
|
execute_agent: resolvedExecuteAgent,
|
|
2850
2960
|
role: resolvedRole,
|
|
2851
2961
|
skills: resolvedSkills,
|
|
@@ -2857,6 +2967,8 @@ async function configAgent(name, options) {
|
|
|
2857
2967
|
config_task_id: configTaskId,
|
|
2858
2968
|
config_version: configVersion,
|
|
2859
2969
|
concurrency,
|
|
2970
|
+
max_retry_attempts: maxRetryAttempts,
|
|
2971
|
+
retry_backoff_seconds: retryBackoffSeconds,
|
|
2860
2972
|
execute_agent: resolvedExecuteAgent,
|
|
2861
2973
|
role: resolvedRole ?? previousConfig.role,
|
|
2862
2974
|
skills: resolvedSkills,
|
|
@@ -2936,6 +3048,22 @@ async function cancelTask(agentName, taskId, reason) {
|
|
|
2936
3048
|
await provider.close();
|
|
2937
3049
|
}
|
|
2938
3050
|
}
|
|
3051
|
+
async function retryTask(agentName, taskId, reason) {
|
|
3052
|
+
const provider = createTaskListProvider(agentName, pino());
|
|
3053
|
+
try {
|
|
3054
|
+
const task = await provider.getTask(taskId);
|
|
3055
|
+
if (!task) return null;
|
|
3056
|
+
if (task.status !== "failed") throw new Error(`Task ${taskId} is ${task.status}; only failed tasks can be retried.`);
|
|
3057
|
+
if (!await retryFailedTaskInPlace(provider, task, pino(), {
|
|
3058
|
+
immediate: true,
|
|
3059
|
+
reason,
|
|
3060
|
+
ignoreRetryLimit: true
|
|
3061
|
+
})) throw new Error(`Task ${taskId} retry was not scheduled.`);
|
|
3062
|
+
return await provider.getTask(taskId);
|
|
3063
|
+
} finally {
|
|
3064
|
+
await provider.close();
|
|
3065
|
+
}
|
|
3066
|
+
}
|
|
2939
3067
|
async function showAgentConfig(agentName, projectId) {
|
|
2940
3068
|
const provider = createTaskListProvider(agentName, pino());
|
|
2941
3069
|
try {
|
|
@@ -3038,24 +3166,26 @@ function formatConciseTaskLine(options) {
|
|
|
3038
3166
|
const statusText = formatStatus(task.status, useColor);
|
|
3039
3167
|
const timeText = formatRelativeTime(timestamp);
|
|
3040
3168
|
const taskText = truncateText(task.task, maxTaskLength);
|
|
3169
|
+
const attempt = Number.isInteger(task.attempt_count) && task.attempt_count > 0 ? task.attempt_count : 1;
|
|
3041
3170
|
return [
|
|
3042
3171
|
agent,
|
|
3043
3172
|
statusText,
|
|
3044
3173
|
task.id,
|
|
3174
|
+
`attempt:${attempt}`,
|
|
3045
3175
|
timeText,
|
|
3046
3176
|
taskText
|
|
3047
3177
|
].filter((part) => part !== "").join(" ");
|
|
3048
3178
|
}
|
|
3049
3179
|
async function assertsSingleton(logger, pidFile) {
|
|
3050
3180
|
try {
|
|
3051
|
-
const pid = await fs.promises.readFile(pidFile, "utf-8");
|
|
3181
|
+
const pid = await fs$1.promises.readFile(pidFile, "utf-8");
|
|
3052
3182
|
process.kill(parseInt(pid), 0);
|
|
3053
3183
|
console.error("Failed to assert singleton agent process:");
|
|
3054
3184
|
process.exit(1);
|
|
3055
3185
|
} catch (e) {
|
|
3056
|
-
await fs.promises.writeFile(pidFile, process.pid.toString());
|
|
3186
|
+
await fs$1.promises.writeFile(pidFile, process.pid.toString());
|
|
3057
3187
|
process.on("exit", () => {
|
|
3058
|
-
fs.promises.rm(pidFile);
|
|
3188
|
+
fs$1.promises.rm(pidFile);
|
|
3059
3189
|
});
|
|
3060
3190
|
}
|
|
3061
3191
|
}
|
|
@@ -3224,8 +3354,18 @@ function parseConcurrency(value) {
|
|
|
3224
3354
|
if (!Number.isInteger(parsed) || parsed < 1) throw new InvalidArgumentError("concurrency must be a positive integer.");
|
|
3225
3355
|
return parsed;
|
|
3226
3356
|
}
|
|
3357
|
+
function parseNonNegativeInteger(value, fieldName) {
|
|
3358
|
+
const parsed = Number(value);
|
|
3359
|
+
if (!Number.isInteger(parsed) || parsed < 0) throw new InvalidArgumentError(`${fieldName} must be a non-negative integer.`);
|
|
3360
|
+
return parsed;
|
|
3361
|
+
}
|
|
3362
|
+
function parsePositiveInteger(value, fieldName) {
|
|
3363
|
+
const parsed = Number(value);
|
|
3364
|
+
if (!Number.isInteger(parsed) || parsed <= 0) throw new InvalidArgumentError(`${fieldName} must be a positive integer.`);
|
|
3365
|
+
return parsed;
|
|
3366
|
+
}
|
|
3227
3367
|
function createConfigAgentCommand(version, deps = {}) {
|
|
3228
|
-
return createCommand("config").version(version).description("Queue a configuration task for an agent/project").argument("<name>", "The name of the agent.").argument("[prompt]", "The configuration task prompt.").option("--project-id <project-id>", "The project id of the agent. Defaults to DEFAULT_PANTHEON_PROJECT_ID.").option("--task-id <task-id>", "Optional parent task id.").option("--role <role>", "Role metadata. Required for first-time project config; optional override later.").option("--skills <skills>", "The skills of the agent. Multiple values are separated by comma.", parseUniqueCommaList).option("--execute-agent <agent>", "The execute agent of the agent.", "codex").option("--concurrency <number>", "Max number of parallel running tasks for this project.", parseConcurrency).option("--root-branch-id <branchId>", "The root branch id of the agent. Defaults to DEFAULT_PANTHEON_ROOT_BRANCH_ID, then project root branch id.").option("--prototype-url <url>", "Role and skill definitions repo.", "https://github.com/pingcap-inc/pantheon-agents").action(async function() {
|
|
3368
|
+
return createCommand("config").version(version).description("Queue a configuration task for an agent/project").argument("<name>", "The name of the agent.").argument("[prompt]", "The configuration task prompt.").option("--project-id <project-id>", "The project id of the agent. Defaults to DEFAULT_PANTHEON_PROJECT_ID.").option("--task-id <task-id>", "Optional parent task id.").option("--role <role>", "Role metadata. Required for first-time project config; optional override later.").option("--skills <skills>", "The skills of the agent. Multiple values are separated by comma.", parseUniqueCommaList).option("--execute-agent <agent>", "The execute agent of the agent.", "codex").option("--concurrency <number>", "Max number of parallel running tasks for this project.", parseConcurrency).option("--max-retry-attempts <number>", "Max automatic retry attempts per task after failures.", (value) => parseNonNegativeInteger(value, "max-retry-attempts")).option("--retry-backoff-seconds <seconds>", "Base delay (seconds) for exponential retry backoff.", (value) => parsePositiveInteger(value, "retry-backoff-seconds")).option("--root-branch-id <branchId>", "The root branch id of the agent. Defaults to DEFAULT_PANTHEON_ROOT_BRANCH_ID, then project root branch id.").option("--prototype-url <url>", "Role and skill definitions repo.", "https://github.com/pingcap-inc/pantheon-agents").action(async function() {
|
|
3229
3369
|
const [name, prompt] = this.args;
|
|
3230
3370
|
const options = this.opts();
|
|
3231
3371
|
const resolvedProjectId = resolvePantheonProjectId(options.projectId);
|
|
@@ -3243,6 +3383,8 @@ function createConfigAgentCommand(version, deps = {}) {
|
|
|
3243
3383
|
role: options.role,
|
|
3244
3384
|
executeAgent: options.executeAgent,
|
|
3245
3385
|
concurrency: options.concurrency,
|
|
3386
|
+
maxRetryAttempts: options.maxRetryAttempts,
|
|
3387
|
+
retryBackoffSeconds: options.retryBackoffSeconds,
|
|
3246
3388
|
skills: options.skills,
|
|
3247
3389
|
prototypeUrl: options.prototypeUrl,
|
|
3248
3390
|
rootBranchId: resolvedRootBranchId
|
|
@@ -3297,9 +3439,11 @@ function formatTaskAsSkillTask(agent, task) {
|
|
|
3297
3439
|
const endedAt = "ended_at" in task ? task.ended_at ?? null : null;
|
|
3298
3440
|
const canceledAt = task.status === "cancelled" ? task.cancelled_at : null;
|
|
3299
3441
|
const finishedAt = endedAt ?? canceledAt ?? null;
|
|
3442
|
+
const attempt = Number.isInteger(task.attempt_count) && task.attempt_count > 0 ? task.attempt_count : 1;
|
|
3300
3443
|
return {
|
|
3301
3444
|
agent,
|
|
3302
3445
|
task: task.task,
|
|
3446
|
+
attempt,
|
|
3303
3447
|
parent_task_id: task.parent_task_id ?? null,
|
|
3304
3448
|
status: toSkillTaskStatus(task.status),
|
|
3305
3449
|
queued_at: task.queued_at.toISOString(),
|
|
@@ -3309,7 +3453,7 @@ function formatTaskAsSkillTask(agent, task) {
|
|
|
3309
3453
|
canceled_at: toIsoOrNull(canceledAt),
|
|
3310
3454
|
cancel_reason: task.cancel_reason ?? null,
|
|
3311
3455
|
output: task.status === "completed" ? task.output : null,
|
|
3312
|
-
error: task.
|
|
3456
|
+
error: task.error ?? null
|
|
3313
3457
|
};
|
|
3314
3458
|
}
|
|
3315
3459
|
function createGetTaskCommand(version, deps = {}) {
|
|
@@ -3390,6 +3534,8 @@ function createShowConfigCommand(version) {
|
|
|
3390
3534
|
config_version: config.config_version,
|
|
3391
3535
|
config_task_id: config.config_task_id,
|
|
3392
3536
|
concurrency: config.concurrency,
|
|
3537
|
+
max_retry_attempts: config.max_retry_attempts ?? 3,
|
|
3538
|
+
retry_backoff_seconds: config.retry_backoff_seconds ?? 30,
|
|
3393
3539
|
role: config.role,
|
|
3394
3540
|
execute_agent: config.execute_agent,
|
|
3395
3541
|
prototype_url: config.prototype_url,
|
|
@@ -3414,6 +3560,8 @@ function createShowConfigCommand(version) {
|
|
|
3414
3560
|
config_version: config.config_version,
|
|
3415
3561
|
config_task_id: config.config_task_id,
|
|
3416
3562
|
concurrency: config.concurrency,
|
|
3563
|
+
max_retry_attempts: config.max_retry_attempts ?? 3,
|
|
3564
|
+
retry_backoff_seconds: config.retry_backoff_seconds ?? 30,
|
|
3417
3565
|
role: config.role,
|
|
3418
3566
|
execute_agent: config.execute_agent,
|
|
3419
3567
|
prototype_url: config.prototype_url,
|
|
@@ -3537,6 +3685,135 @@ function createShowTasksCommand(version, deps = {}) {
|
|
|
3537
3685
|
});
|
|
3538
3686
|
}
|
|
3539
3687
|
|
|
3688
|
+
//#endregion
|
|
3689
|
+
//#region src/cli/commands/retry-task.ts
|
|
3690
|
+
// Normalize a task's stored attempt counter into a usable attempt number.
// Anything that is not a positive integer (missing, zero, negative,
// fractional, or non-numeric) falls back to attempt 1.
function resolveTaskAttempt(value) {
  const isPositiveInteger = typeof value === "number" && Number.isInteger(value) && value > 0;
  return isPositiveInteger ? value : 1;
}
|
|
3694
|
+
// Build the `retry-task` subcommand: requeues a failed task as a new attempt.
// Unless -y/--yes (or -f/--force) is given, the user must re-type the task id
// and the word RETRY before anything is touched. `deps.retryTask` allows tests
// to inject a fake implementation.
function createRetryTaskCommand(version, deps = {}) {
  return createCommand("retry-task")
    .version(version)
    .description("Retry a failed task for an agent")
    .argument("<name>", "The name of the agent.")
    .argument("<task-id>", "The id of the failed task.")
    .argument("[reason]", "Optional retry reason.")
    .option("-y, --yes", "Skip confirmation prompt.")
    .option("-f, --force", "Alias for --yes.")
    .action(async function() {
      const [name, taskId, reason] = this.args;
      const opts = this.opts();
      if (!ensureEnv(["DATABASE_URL"])) return;
      // Only open an interactive prompt when no skip flag was passed.
      const skipPrompt = opts.yes || opts.force;
      const rl = skipPrompt ? null : readline.createInterface({
        input: process$1.stdin,
        output: process$1.stdout
      });
      try {
        if (rl) {
          // Two-step confirmation: exact task id, then the literal word RETRY.
          const idAnswer = await rl.question(`Type the task id (${taskId}) to confirm retry: `);
          if (idAnswer.trim() !== taskId) {
            console.error("Confirmation failed. Task id did not match.");
            process$1.exitCode = 1;
            return;
          }
          const retryAnswer = await rl.question("Type RETRY to requeue this failed task: ");
          if (retryAnswer.trim() !== "RETRY") {
            console.error("Confirmation failed. Aborting retry.");
            process$1.exitCode = 1;
            return;
          }
        }
        const doRetry = deps.retryTask ?? retryTask;
        const updatedTask = await doRetry(name, taskId, reason);
        if (!updatedTask) {
          console.error(`Task ${taskId} not found for agent ${name}.`);
          process$1.exitCode = 1;
          return;
        }
        // A successful retry must land the task back in the pending queue.
        if (updatedTask.status !== "pending") {
          console.error(`Unexpected result: task ${taskId} status is ${updatedTask.status} after retry.`);
          process$1.exitCode = 1;
          return;
        }
        const attempt = resolveTaskAttempt(updatedTask.attempt_count);
        console.log(`Requeued task ${taskId} for agent ${name} as attempt ${attempt}, next run at ${updatedTask.queued_at.toISOString()}.${reason ? ` Reason: ${reason}` : ""}`);
      } catch (error) {
        console.error(error instanceof Error ? error.message : String(error));
        process$1.exitCode = 1;
      } finally {
        // Always release the readline interface, even on early return/throw.
        rl?.close();
      }
    });
}
|
|
3737
|
+
|
|
3738
|
+
//#endregion
|
|
3739
|
+
//#region src/cli/commands/skill-sh.ts
|
|
3740
|
+
// Skill documentation baked into the bundle so `pantheon-agents skill.sh`
// can print CLI usage without reading any file from disk at runtime.
const EMBEDDED_SKILL_MARKDOWN = "---\nname: pantheon-agents\ndescription: \"Pantheon Agents CLI Usage\"\n---\n\n> Pantheon is the project name of an internal VM environment management project. Do not guess any usage from the name.\n\n\n## Pantheon Agents CLI Usage\n\n### Concepts\n\n- agent: Each agent has a role (like `developer`) and several skills to use.\n- environment: The context of tools, codes or anything else for agents to run tasks. You don't need to know the details.\n- tasks: tasks for each agent to run, each task will create a new environment.\n\n#### Task structure\n\n- agent: The agent to executing the task\n- task (text): The task content\n- attempt: Attempt number (starts from 1 and increases on retries)\n- parent_task_id: The parent task id\n- status: `pending`, `running`, `completed`, `failed`, `canceled`\n- queued_at\n- started_at\n- ended_at\n- finished_at\n- canceled_at\n- cancel_reason\n- output: The output of the completed task.\n- error: The last known error message (failed task error is kept across retries).\n\n#### Other internal implementation details you don't need to know or use\n\n- project\n- branches\n\n### CLI Commands\n\n**You can only use the CLI commands listed below.**\n\n#### Setup a base environment for an agent:\n\n```shell\n# Use default setup script at first time configuration\npantheon-agents config 'Kris' \\\n --role 'developer' \\\n --skills pantheon-issue-resolve,pantheon-solution-design \\\n --max-retry-attempts 3 \\\n --retry-backoff-seconds 30\n\n# Add some custom prompts\npantheon-agents config 'Kris' \\\n --role 'developer' \\\n --skills pantheon-issue-resolve,pantheon-solution-design \\\n 'Update tools to the latest version.'\n```\n\n#### Update agent's environment\n\n##### Configure agent environment by prompt\n\n```shell\npantheon-agents config 'Kris' 'Install some tools'\n```\n\n##### Configure agent to a task result environment\n\npantheon-agents config 'Kris' --task-id 42\n\n#### Enqueue tasks\n\n```shell\npantheon-agents add-task 'Kris' 'Some awesome task'\n ```\n\nWhen adding tasks, cli will output the task id. You can add tasks with `--parent-task-id` to create a task hierarchy.\n\n```shell\npantheon-agents add-task 'Kris' 'Some awesome task' --parent-task-id 42\n```\n\n\n#### List tasks\n\n```shell\npantheon-agents show-tasks --json 'Kris'\n```\n\n\n#### Show single task info\n\n```shell\npantheon-agents get-task 'Kris' 42\n```\n\nReturns task info using this task structure (without internal project/branch fields):\n\n```json\n{\n \"agent\": \"Kris\",\n \"task\": \"Some awesome task\",\n \"attempt\": 1,\n \"parent_task_id\": \"41\",\n \"status\": \"completed\",\n \"queued_at\": \"2026-02-12T00:00:00.000Z\",\n \"started_at\": \"2026-02-12T00:00:05.000Z\",\n \"ended_at\": \"2026-02-12T00:05:00.000Z\",\n \"finished_at\": \"2026-02-12T00:05:00.000Z\",\n \"canceled_at\": null,\n \"cancel_reason\": null,\n \"output\": \"Task output\",\n \"error\": null\n}\n```\n\n#### Cancel task\n\n```shell\npantheon-agents cancel-task 'Kris' 42 'Reason'\n```\n\n#### Retry failed task\n\n```shell\n# Manual retry (immediate requeue and increases attempt)\npantheon-agents retry-task 'Kris' 42 'Reason'\n```\n\n#### Print embedded skill markdown\n\n```shell\npantheon-agents skill.sh\n```\n";
|
|
3741
|
+
// Build the `skill.sh` subcommand: writes the embedded skill markdown to stdout.
function createSkillShCommand(version) {
  const printSkill = () => {
    process$1.stdout.write(EMBEDDED_SKILL_MARKDOWN);
  };
  return createCommand("skill.sh")
    .version(version)
    .description("Print embedded pantheon-agents skill markdown")
    .action(printSkill);
}
|
|
3746
|
+
|
|
3747
|
+
//#endregion
|
|
3748
|
+
//#region src/cli/commands/gen-migration-sql.ts
|
|
3749
|
+
// Matches a migration header comment line of the form
// `-- introduced_in: x.y.z` (multiline mode) and captures the semver,
// so migrations can be filtered by the package version they shipped in.
const introducedInPattern = /^--\s*introduced_in:\s*([0-9]+\.[0-9]+\.[0-9]+)\s*$/m;
|
3750
|
+
// Commander option parser for --provider: only "tidb" and "db9" are valid.
// Throws InvalidArgumentError so commander reports a usage error.
function parseProvider(value) {
  switch (value) {
    case "tidb":
    case "db9":
      return value;
    default:
      throw new InvalidArgumentError("provider must be one of: tidb, db9");
  }
}
|
|
3754
|
+
// Parse a plain `x.y.z` version string into a [major, minor, patch] number
// triple. Leading/trailing whitespace is tolerated; anything else (missing
// parts, extra parts, non-digits) yields null.
function parseSemver(value) {
  const parts = /^(\d+)\.(\d+)\.(\d+)$/.exec(value.trim());
  if (parts === null) return null;
  const [, major, minor, patch] = parts;
  return [Number(major), Number(minor), Number(patch)];
}
|
|
3763
|
+
// Commander option parser for --from: validates that the value is a plain
// x.y.z semver and returns it unchanged, otherwise raises a usage error.
function parseSemverOption(value) {
  const parsed = parseSemver(value);
  if (parsed == null) throw new InvalidArgumentError("from must be a semver like 0.3.0");
  return value;
}
|
|
3767
|
+
// Three-way semver comparison: returns 1 when a > b, -1 when a < b, 0 when
// equal. Throws when either side is not a valid x.y.z string.
function compareSemver(a, b) {
  const left = parseSemver(a);
  const right = parseSemver(b);
  if (!left || !right) throw new Error(`Invalid semver comparison: '${a}' vs '${b}'`);
  // Compare major, then minor, then patch; first difference decides.
  for (const [index, part] of left.entries()) {
    if (part > right[index]) return 1;
    if (part < right[index]) return -1;
  }
  return 0;
}
|
|
3777
|
+
// Locate the migrations directory for `provider`. Source-tree layouts
// (relative to `cwd`) are tried first, then layouts relative to this
// module (the dist bundle copies migrations next to the entry point).
// Throws listing every candidate when no directory is found.
function resolveMigrationDir(provider, cwd) {
  const moduleDir = path.dirname(fileURLToPath(import.meta.url));
  const candidates = [
    path.resolve(cwd, "src/db/migrations", provider),
    path.resolve(cwd, "packages/agents/src/db/migrations", provider),
    path.resolve(moduleDir, "db/migrations", provider),
    path.resolve(moduleDir, "../db/migrations", provider),
    path.resolve(moduleDir, "../../db/migrations", provider)
  ];
  const found = candidates.find((dir) => fs.existsSync(dir) && fs.statSync(dir).isDirectory());
  if (found) return found;
  throw new Error(`Migration directory not found for provider '${provider}'. Checked: ${candidates.join(", ")}`);
}
|
|
3789
|
+
// Load every `.sql` migration for `provider`, ordered by file name.
// Each file must carry an `-- introduced_in: x.y.z` header; a missing
// header is a hard error so unversioned migrations cannot slip through.
// `options.cwd` overrides the search root (defaults to the process cwd).
function loadProviderMigrations(provider, options = {}) {
  const migrationDir = resolveMigrationDir(provider, options.cwd ?? process$1.cwd());
  const sqlFileNames = fs.readdirSync(migrationDir)
    .filter((entry) => entry.endsWith(".sql"))
    .sort((a, b) => a.localeCompare(b));
  return sqlFileNames.map((fileName) => {
    const sql = fs.readFileSync(path.join(migrationDir, fileName), "utf8");
    const introducedIn = sql.match(introducedInPattern)?.[1];
    if (!introducedIn) throw new Error(`Migration file '${fileName}' missing required header '-- introduced_in: x.y.z'.`);
    return { fileName, introducedIn, sql };
  });
}
|
|
3803
|
+
// Concatenate the SQL of every migration introduced strictly after
// `fromVersion` for the given provider. Returns a comment-only placeholder
// when nothing needs to be applied; throws on a malformed version string.
function getMigrationSql(provider, fromVersion, options = {}) {
  if (!parseSemver(fromVersion)) throw new Error(`Invalid from version '${fromVersion}'. Expected x.y.z`);
  const pending = loadProviderMigrations(provider, options)
    .filter(({ introducedIn }) => compareSemver(introducedIn, fromVersion) > 0);
  if (pending.length === 0) return `-- No migrations for provider '${provider}' since ${fromVersion}.\n`;
  const sections = pending.map(({ fileName, sql }) => `-- migration: ${fileName}\n${sql.trimEnd()}\n`);
  return sections.join("\n");
}
|
|
3809
|
+
// Build the `gen-migration-sql` subcommand: prints every SQL migration
// introduced after --from for the given --provider. `deps.getMigrationSql`
// allows tests to inject a fake generator.
function createGenMigrationSqlCommand(version, deps = {}) {
  const command = createCommand("gen-migration-sql")
    .version(version)
    .description("Print SQL migrations since a given package version")
    .requiredOption("--provider <provider>", "Database provider: tidb or db9.", parseProvider)
    .requiredOption("--from <version>", "Print migrations introduced after this version.", parseSemverOption);
  return command.action(async function() {
    const { provider, from } = this.opts();
    const generate = deps.getMigrationSql ?? getMigrationSql;
    process$1.stdout.write(generate(provider, from));
  });
}
|
|
3816
|
+
|
|
3540
3817
|
//#endregion
|
|
3541
3818
|
//#region ../agent-stream-parser/src/utils.ts
|
|
3542
3819
|
function isRecord(value) {
|
|
@@ -5962,7 +6239,7 @@ function createWatchStreamCommand(version) {
|
|
|
5962
6239
|
|
|
5963
6240
|
//#endregion
|
|
5964
6241
|
//#region src/cli/index.ts
|
|
5965
|
-
const program = new Command().name("pantheon-agents").description("Pantheon agents CLI").version(version).showHelpAfterError().showSuggestionAfterError().addHelpCommand().addCommand(createAddTaskCommand(version)).addCommand(createConfigAgentCommand(version)).addCommand(createDeleteTaskCommand(version)).addCommand(createCancelTaskCommand(version)).addCommand(createGetTaskCommand(version)).addCommand(createRunAgentCommand(version)).addCommand(createShowConfigCommand(version)).addCommand(createShowTasksCommand(version)).addCommand(createWatchCommand(version)).addCommand(createWatchStreamCommand(version));
|
|
6242
|
+
// Root CLI program: registers every subcommand, including the ones added in
// this version (retry-task, skill.sh, gen-migration-sql) per the changelog.
const program = new Command().name("pantheon-agents").description("Pantheon agents CLI").version(version).showHelpAfterError().showSuggestionAfterError().addHelpCommand().addCommand(createAddTaskCommand(version)).addCommand(createConfigAgentCommand(version)).addCommand(createDeleteTaskCommand(version)).addCommand(createCancelTaskCommand(version)).addCommand(createGetTaskCommand(version)).addCommand(createRunAgentCommand(version)).addCommand(createRetryTaskCommand(version)).addCommand(createSkillShCommand(version)).addCommand(createGenMigrationSqlCommand(version)).addCommand(createShowConfigCommand(version)).addCommand(createShowTasksCommand(version)).addCommand(createWatchCommand(version)).addCommand(createWatchStreamCommand(version));
|
|
5966
6243
|
async function main() {
|
|
5967
6244
|
if (process$1.argv.length <= 2) {
|
|
5968
6245
|
program.outputHelp();
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@pantheon.ai/agents",
|
|
3
3
|
"type": "module",
|
|
4
|
-
"version": "0.3.
|
|
4
|
+
"version": "0.3.1",
|
|
5
5
|
"bin": {
|
|
6
6
|
"pantheon-agents": "dist/index.js"
|
|
7
7
|
},
|
|
@@ -12,7 +12,7 @@
|
|
|
12
12
|
"access": "public"
|
|
13
13
|
},
|
|
14
14
|
"scripts": {
|
|
15
|
-
"build": "rolldown -c rolldown.config.ts && chmod +x dist/index.js",
|
|
15
|
+
"build": "rolldown -c rolldown.config.ts && mkdir -p dist/db && cp -R src/db/migrations dist/db/ && chmod +x dist/index.js",
|
|
16
16
|
"typecheck": "tsc --noEmit --skipLibCheck --allowImportingTsExtensions --lib ESNext,DOM,DOM.Iterable",
|
|
17
17
|
"test": "bun test",
|
|
18
18
|
"test:coverage": "bun test --coverage",
|