@biaoo/tiangong-wiki 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +167 -0
- package/README.zh-CN.md +167 -0
- package/SKILL.md +116 -0
- package/agents/openai.yaml +4 -0
- package/assets/config.example.env +18 -0
- package/assets/templates/achievement.md +32 -0
- package/assets/templates/bridge.md +33 -0
- package/assets/templates/concept.md +47 -0
- package/assets/templates/faq.md +31 -0
- package/assets/templates/lesson.md +31 -0
- package/assets/templates/method.md +31 -0
- package/assets/templates/misconception.md +35 -0
- package/assets/templates/person.md +31 -0
- package/assets/templates/research-note.md +34 -0
- package/assets/templates/resume.md +34 -0
- package/assets/templates/source-summary.md +35 -0
- package/assets/vllm/qwen3_5_openai_developer.jinja +182 -0
- package/assets/wiki.config.default.json +193 -0
- package/dist/commands/check-config.js +77 -0
- package/dist/commands/create.js +32 -0
- package/dist/commands/daemon.js +186 -0
- package/dist/commands/dashboard.js +112 -0
- package/dist/commands/doctor.js +22 -0
- package/dist/commands/export-graph.js +28 -0
- package/dist/commands/export-index.js +31 -0
- package/dist/commands/find.js +36 -0
- package/dist/commands/fts.js +32 -0
- package/dist/commands/graph.js +35 -0
- package/dist/commands/init.js +48 -0
- package/dist/commands/lint.js +35 -0
- package/dist/commands/list.js +28 -0
- package/dist/commands/page-info.js +24 -0
- package/dist/commands/search.js +32 -0
- package/dist/commands/setup.js +15 -0
- package/dist/commands/stat.js +20 -0
- package/dist/commands/sync.js +38 -0
- package/dist/commands/template.js +71 -0
- package/dist/commands/type.js +88 -0
- package/dist/commands/vault.js +64 -0
- package/dist/core/agent.js +201 -0
- package/dist/core/cli-env.js +129 -0
- package/dist/core/codex-workflow.js +233 -0
- package/dist/core/config.js +126 -0
- package/dist/core/db.js +292 -0
- package/dist/core/embedding.js +104 -0
- package/dist/core/frontmatter.js +287 -0
- package/dist/core/indexer.js +241 -0
- package/dist/core/onboarding.js +967 -0
- package/dist/core/page-files.js +91 -0
- package/dist/core/paths.js +161 -0
- package/dist/core/presenters.js +23 -0
- package/dist/core/query.js +58 -0
- package/dist/core/runtime.js +20 -0
- package/dist/core/sync.js +235 -0
- package/dist/core/synology.js +412 -0
- package/dist/core/template-evolution.js +38 -0
- package/dist/core/vault-processing.js +742 -0
- package/dist/core/vault.js +594 -0
- package/dist/core/workflow-context.js +188 -0
- package/dist/core/workflow-result.js +162 -0
- package/dist/core/workspace-bootstrap.js +30 -0
- package/dist/core/workspace-skills.js +220 -0
- package/dist/daemon/client.js +147 -0
- package/dist/daemon/server.js +807 -0
- package/dist/daemon/state.js +53 -0
- package/dist/dashboard/assets/index-1FgAUZ28.css +1 -0
- package/dist/dashboard/assets/index-6A0PWT4X.js +154 -0
- package/dist/dashboard/assets/jetbrains-mono-cyrillic-400-normal-BEIGL1Tu.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-cyrillic-400-normal-ugxPyKxw.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-cyrillic-500-normal-DJqRU3vO.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-cyrillic-500-normal-DmUKJPL_.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-cyrillic-700-normal-BWTpRfYl.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-cyrillic-700-normal-CEoEElIJ.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-greek-400-normal-B9oWc5Lo.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-greek-400-normal-C190GLew.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-greek-500-normal-D7SFKleX.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-greek-500-normal-JpySY46c.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-greek-700-normal-C6CZE3T8.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-greek-700-normal-DEigVDxa.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-400-normal-6-qcROiO.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-400-normal-V6pRDFza.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-500-normal-BWZEU5yA.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-500-normal-CJOVTJB7.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-700-normal-BYuf6tUa.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-700-normal-D3wTyLJW.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-ext-400-normal-Bc8Ftmh3.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-ext-400-normal-fXTG6kC5.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-ext-500-normal-Cut-4mMH.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-ext-500-normal-ckzbgY84.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-ext-700-normal-CZipNAKV.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-ext-700-normal-CxPITLHs.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-vietnamese-400-normal-CqNFfHCs.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-vietnamese-500-normal-DNRqzVM1.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-vietnamese-700-normal-BDLVIk2r.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-400-normal-BnQMeOim.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-400-normal-CJ-V5oYT.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-500-normal-CNSSEhBt.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-500-normal-lFbtlQH6.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-700-normal-CwsQ-cCU.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-700-normal-RjhwGPKo.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-ext-400-normal-CfP_5XZW.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-ext-400-normal-DRPE3kg4.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-ext-500-normal-3dgZTiw9.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-ext-500-normal-DUe3BAxM.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-ext-700-normal-BQnZhY3m.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-ext-700-normal-HVCqSBdx.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-vietnamese-400-normal-B7xT_GF5.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-vietnamese-400-normal-BIWiOVfw.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-vietnamese-500-normal-BTqKIpxg.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-vietnamese-500-normal-BmEvtly_.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-vietnamese-700-normal-DMty7AZE.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-vietnamese-700-normal-Duxec5Rn.woff +0 -0
- package/dist/dashboard/index.html +18 -0
- package/dist/index.js +86 -0
- package/dist/operations/dashboard.js +1231 -0
- package/dist/operations/export.js +110 -0
- package/dist/operations/query.js +649 -0
- package/dist/operations/type-template.js +210 -0
- package/dist/operations/write.js +143 -0
- package/dist/types/config.js +1 -0
- package/dist/types/page.js +1 -0
- package/dist/utils/case.js +22 -0
- package/dist/utils/errors.js +26 -0
- package/dist/utils/fs.js +77 -0
- package/dist/utils/output.js +33 -0
- package/dist/utils/process.js +60 -0
- package/dist/utils/segmenter.js +24 -0
- package/dist/utils/slug.js +10 -0
- package/dist/utils/time.js +24 -0
- package/package.json +64 -0
- package/references/cli-interface.md +312 -0
- package/references/env.md +122 -0
- package/references/template-design-guide.md +271 -0
- package/references/vault-to-wiki-instruction.md +110 -0
- package/references/wiki-maintenance-instruction.md +190 -0
|
@@ -0,0 +1,742 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import { CODEX_WORKFLOW_VERSION, createDefaultWorkflowRunner, } from "./codex-workflow.js";
|
|
3
|
+
import { loadConfig } from "./config.js";
|
|
4
|
+
import { openDb } from "./db.js";
|
|
5
|
+
import { resolveAgentSettings, resolveRuntimePaths } from "./paths.js";
|
|
6
|
+
import { assertTemplateEvolutionAllowed, resolveTemplateEvolutionSettings } from "./template-evolution.js";
|
|
7
|
+
import { ensureLocalVaultFile } from "./vault.js";
|
|
8
|
+
import { buildVaultWorkflowPrompt, ensureWorkflowArtifactSet, getWorkflowArtifactSet, } from "./workflow-context.js";
|
|
9
|
+
import { readWorkflowResult } from "./workflow-result.js";
|
|
10
|
+
import { AppError } from "../utils/errors.js";
|
|
11
|
+
import { readTextFileSync } from "../utils/fs.js";
|
|
12
|
+
import { toOffsetIso } from "../utils/time.js";
|
|
13
|
+
// Maximum number of same-process workflow attempts per queue item when the
// runner supports inline retries (see isInlineRetryCapable).
const INLINE_WORKFLOW_ATTEMPTS = 2;
|
|
14
|
+
/**
 * Build an optional SQL fragment restricting queue queries to specific file ids.
 *
 * @param {string[]|undefined} filterFileIds - ids to filter on; empty or absent means no filter.
 * @returns {{clause: string, params: string[]}} a clause with a leading " AND ..."
 *   (or "") plus the positional bind parameters for its placeholders.
 */
function buildFileIdFilterClause(filterFileIds) {
    const ids = filterFileIds ?? [];
    if (ids.length === 0) {
        return { clause: "", params: [] };
    }
    const placeholders = ids.map(() => "?").join(", ");
    return {
        clause: ` AND vault_processing_queue.file_id IN (${placeholders})`,
        params: ids,
    };
}
|
|
23
|
+
/**
 * Normalize a value that may be a string array or a JSON-encoded string array
 * (as stored in TEXT columns) into an array of trimmed, non-empty strings.
 * Any other shape — non-string scalars, blank strings, malformed JSON, or
 * JSON that is not an array — yields [].
 */
function parseOptionalStringArray(value) {
    const clean = (entries) => entries
        .map((entry) => String(entry ?? "").trim())
        .filter(Boolean);
    if (Array.isArray(value)) {
        return clean(value);
    }
    if (typeof value !== "string" || !value.trim()) {
        return [];
    }
    try {
        const parsed = JSON.parse(value);
        return Array.isArray(parsed) ? clean(parsed) : [];
    }
    catch {
        // Malformed JSON is treated as "no values", not an error.
        return [];
    }
}
|
|
45
|
+
/**
 * Map a raw queue SQL row (camelCase column aliases) into a normalized
 * queue-item object: nullable TEXT columns become string-or-null, joined
 * vault_files columns become string-or-undefined, and JSON-array columns are
 * parsed via parseOptionalStringArray.
 */
function mapQueueRow(row) {
    const strOrNull = (value) => (typeof value === "string" ? value : null);
    const strOrUndef = (value) => (typeof value === "string" ? value : undefined);
    return {
        fileId: String(row.fileId),
        status: row.status,
        priority: Number(row.priority ?? 0),
        queuedAt: String(row.queuedAt),
        claimedAt: strOrNull(row.claimedAt),
        startedAt: strOrNull(row.startedAt),
        processedAt: strOrNull(row.processedAt),
        resultPageId: strOrNull(row.resultPageId),
        errorMessage: strOrNull(row.errorMessage),
        attempts: Number(row.attempts ?? 0),
        threadId: strOrNull(row.threadId),
        workflowVersion: strOrNull(row.workflowVersion),
        decision: strOrNull(row.decision),
        resultManifestPath: strOrNull(row.resultManifestPath),
        lastErrorAt: strOrNull(row.lastErrorAt),
        retryAfter: strOrNull(row.retryAfter),
        createdPageIds: parseOptionalStringArray(row.createdPageIds),
        updatedPageIds: parseOptionalStringArray(row.updatedPageIds),
        appliedTypeNames: parseOptionalStringArray(row.appliedTypeNames),
        proposedTypeNames: parseOptionalStringArray(row.proposedTypeNames),
        skillsUsed: parseOptionalStringArray(row.skillsUsed),
        fileName: strOrUndef(row.fileName),
        fileExt: strOrNull(row.fileExt),
        sourceType: strOrNull(row.sourceType),
        fileSize: typeof row.fileSize === "number" ? row.fileSize : undefined,
        filePath: strOrUndef(row.filePath),
    };
}
|
|
75
|
+
/**
 * Atomically claim up to `limit` queue items for processing.
 *
 * Selects pending/error rows (highest priority first, oldest first), marks
 * each one 'processing' inside a single transaction, and returns the claimed
 * items with their claim/start timestamps filled in.
 *
 * @param db - open database handle exposing prepare()/transaction().
 * @param limit - maximum number of items to claim.
 * @param filterFileIds - optional allow-list of file ids to restrict the claim.
 * @returns mapped queue items, already stamped as claimed/started.
 */
function claimQueueItems(db, limit, filterFileIds) {
    const filter = buildFileIdFilterClause(filterFileIds);
    const select = db.prepare(`
    SELECT
      file_id AS fileId,
      status,
      priority,
      queued_at AS queuedAt,
      claimed_at AS claimedAt,
      started_at AS startedAt,
      processed_at AS processedAt,
      result_page_id AS resultPageId,
      error_message AS errorMessage,
      attempts,
      thread_id AS threadId,
      workflow_version AS workflowVersion,
      decision,
      result_manifest_path AS resultManifestPath,
      last_error_at AS lastErrorAt,
      retry_after AS retryAfter,
      created_page_ids AS createdPageIds,
      updated_page_ids AS updatedPageIds,
      applied_type_names AS appliedTypeNames,
      proposed_type_names AS proposedTypeNames,
      skills_used AS skillsUsed,
      vault_files.file_name AS fileName,
      vault_files.file_ext AS fileExt,
      vault_files.source_type AS sourceType,
      vault_files.file_size AS fileSize,
      vault_files.file_path AS filePath
    FROM vault_processing_queue
    LEFT JOIN vault_files ON vault_files.id = vault_processing_queue.file_id
    WHERE status IN ('pending', 'error')${filter.clause}
    ORDER BY priority DESC, queued_at ASC
    LIMIT ?
  `);
    // The UPDATE re-checks status so a row changed between SELECT and UPDATE
    // within the transaction's visibility rules is simply skipped.
    const markProcessing = db.prepare(`
    UPDATE vault_processing_queue
    SET
      status = 'processing',
      claimed_at = @claimed_at,
      started_at = @started_at,
      error_message = NULL
    WHERE file_id = @file_id AND status IN ('pending', 'error')
  `);
    return db.transaction((claimLimit, claimFilterParams) => {
        const startedAt = toOffsetIso();
        // Filter params bind into the WHERE clause placeholders; the final
        // positional parameter feeds LIMIT ?.
        const items = select.all(...claimFilterParams, claimLimit).map(mapQueueRow);
        for (const item of items) {
            markProcessing.run({
                file_id: item.fileId,
                claimed_at: startedAt,
                started_at: startedAt,
            });
        }
        // Reflect the in-transaction claim in the returned objects so callers
        // see the timestamps without re-reading the rows.
        return items.map((item) => ({
            ...item,
            claimedAt: startedAt,
            startedAt,
        }));
    })(limit, filter.params);
}
|
|
137
|
+
/**
 * Fetch all queue items, optionally restricted to a single status, ordered by
 * priority (desc) then enqueue time (asc). Joined vault_files columns are
 * included so callers get file metadata without a second query.
 */
function fetchQueueItemsByStatus(db, status) {
    const rows = db.prepare(`
    SELECT
      file_id AS fileId,
      status,
      priority,
      queued_at AS queuedAt,
      claimed_at AS claimedAt,
      started_at AS startedAt,
      processed_at AS processedAt,
      result_page_id AS resultPageId,
      error_message AS errorMessage,
      attempts,
      thread_id AS threadId,
      workflow_version AS workflowVersion,
      decision,
      result_manifest_path AS resultManifestPath,
      last_error_at AS lastErrorAt,
      retry_after AS retryAfter,
      created_page_ids AS createdPageIds,
      updated_page_ids AS updatedPageIds,
      applied_type_names AS appliedTypeNames,
      proposed_type_names AS proposedTypeNames,
      skills_used AS skillsUsed,
      vault_files.file_name AS fileName,
      vault_files.file_ext AS fileExt,
      vault_files.source_type AS sourceType,
      vault_files.file_size AS fileSize,
      vault_files.file_path AS filePath
    FROM vault_processing_queue
    LEFT JOIN vault_files ON vault_files.id = vault_processing_queue.file_id
    ${status ? "WHERE status = ?" : ""}
    ORDER BY priority DESC, queued_at ASC
  `).all(...(status ? [status] : []));
    return rows.map(mapQueueRow);
}
|
|
173
|
+
/**
 * Look up a single queue item by its vault file id.
 * @returns the mapped queue item, or null when the file is not queued.
 */
function fetchQueueItemByFileId(db, fileId) {
    const row = db.prepare(`
    SELECT
      file_id AS fileId,
      status,
      priority,
      queued_at AS queuedAt,
      claimed_at AS claimedAt,
      started_at AS startedAt,
      processed_at AS processedAt,
      result_page_id AS resultPageId,
      error_message AS errorMessage,
      attempts,
      thread_id AS threadId,
      workflow_version AS workflowVersion,
      decision,
      result_manifest_path AS resultManifestPath,
      last_error_at AS lastErrorAt,
      retry_after AS retryAfter,
      created_page_ids AS createdPageIds,
      updated_page_ids AS updatedPageIds,
      applied_type_names AS appliedTypeNames,
      proposed_type_names AS proposedTypeNames,
      skills_used AS skillsUsed,
      vault_files.file_name AS fileName,
      vault_files.file_ext AS fileExt,
      vault_files.source_type AS sourceType,
      vault_files.file_size AS fileSize,
      vault_files.file_path AS filePath
    FROM vault_processing_queue
    LEFT JOIN vault_files ON vault_files.id = vault_processing_queue.file_id
    WHERE vault_processing_queue.file_id = ?
  `).get(fileId);
    return row ? mapQueueRow(row) : null;
}
|
|
208
|
+
/**
 * Load a vault file's index row by id.
 * @returns the raw row (camelCase aliases), or null when the file is not indexed.
 */
function fetchVaultFile(db, fileId) {
    const row = db.prepare(`
    SELECT
      id,
      file_name AS fileName,
      file_ext AS fileExt,
      source_type AS sourceType,
      file_size AS fileSize,
      file_path AS filePath,
      content_hash AS contentHash,
      file_mtime AS fileMtime,
      indexed_at AS indexedAt
    FROM vault_files
    WHERE id = ?
  `).get(fileId);
    return row ?? null;
}
|
|
225
|
+
/**
 * Update a queue row's status and processing outcome.
 *
 * `result_page_id` is only overwritten when a new value is supplied (COALESCE
 * keeps the previous id otherwise); `attempts` increments only when
 * payload.incrementAttempts is set.
 *
 * @param payload - { status, processedAt, resultPageId?, errorMessage?, incrementAttempts? }
 */
function updateQueueStatus(db, fileId, payload) {
    db.prepare(`
    UPDATE vault_processing_queue
    SET
      status = @status,
      processed_at = @processed_at,
      result_page_id = COALESCE(@result_page_id, result_page_id),
      error_message = @error_message,
      attempts = CASE WHEN @increment_attempts = 1 THEN attempts + 1 ELSE attempts END
    WHERE file_id = @file_id
  `).run({
        file_id: fileId,
        status: payload.status,
        processed_at: payload.processedAt,
        result_page_id: payload.resultPageId ?? null,
        error_message: payload.errorMessage ?? null,
        increment_attempts: payload.incrementAttempts ? 1 : 0,
    });
}
|
|
244
|
+
/**
 * Record workflow bookkeeping (thread id, workflow version, result manifest
 * path) on a queue row. Always stamps the current CODEX_WORKFLOW_VERSION.
 *
 * @param payload - { threadId, resultManifestPath }
 */
function updateQueueWorkflowTracking(db, fileId, payload) {
    db.prepare(`
    UPDATE vault_processing_queue
    SET
      thread_id = @thread_id,
      workflow_version = @workflow_version,
      result_manifest_path = @result_manifest_path
    WHERE file_id = @file_id
  `).run({
        file_id: fileId,
        thread_id: payload.threadId,
        workflow_version: CODEX_WORKFLOW_VERSION,
        result_manifest_path: payload.resultManifestPath,
    });
}
|
|
259
|
+
/**
 * Encode a string array as JSON text for storage in a queue TEXT column
 * (round-tripped by parseOptionalStringArray on read).
 */
function serializeArray(value) {
    const encoded = JSON.stringify(value);
    return encoded;
}
|
|
262
|
+
/**
 * Render a workflow manifest's summary fields as a space-separated
 * "key=value" log segment; empty lists render as "-".
 */
function formatManifestLogFields(manifest) {
    const joinOrDash = (values) => values.join(",") || "-";
    const proposedNames = manifest.proposedTypes.map((item) => item.name);
    return [
        `decision=${manifest.decision}`,
        `skills=${joinOrDash(manifest.skillsUsed)}`,
        `created=${joinOrDash(manifest.createdPageIds)}`,
        `updated=${joinOrDash(manifest.updatedPageIds)}`,
        `proposed=${joinOrDash(proposedNames)}`,
    ].join(" ");
}
|
|
265
|
+
/**
 * Persist a completed workflow manifest onto its queue row.
 *
 * Clears error/retry bookkeeping, stamps the workflow version/decision, and
 * stores the page-id / type-name arrays as JSON text.
 *
 * @returns {{status: string, pageId: string|null}} the final queue status and
 *   the headline result page (first created page, else first updated page).
 */
function applyWorkflowManifest(db, fileId, manifest, resultManifestPath) {
    // Prefer a newly created page as the primary result; fall back to the
    // first updated page, then null.
    const resultPageId = manifest.createdPageIds[0] ?? manifest.updatedPageIds[0] ?? null;
    const status = manifest.status;
    const processedAt = toOffsetIso();
    db.prepare(`
    UPDATE vault_processing_queue
    SET
      status = @status,
      processed_at = @processed_at,
      result_page_id = @result_page_id,
      error_message = NULL,
      workflow_version = @workflow_version,
      decision = @decision,
      result_manifest_path = @result_manifest_path,
      last_error_at = NULL,
      retry_after = NULL,
      created_page_ids = @created_page_ids,
      updated_page_ids = @updated_page_ids,
      applied_type_names = @applied_type_names,
      proposed_type_names = @proposed_type_names,
      skills_used = @skills_used
    WHERE file_id = @file_id
  `).run({
        file_id: fileId,
        status,
        processed_at: processedAt,
        result_page_id: resultPageId,
        workflow_version: CODEX_WORKFLOW_VERSION,
        decision: manifest.decision,
        result_manifest_path: resultManifestPath,
        created_page_ids: serializeArray(manifest.createdPageIds),
        updated_page_ids: serializeArray(manifest.updatedPageIds),
        applied_type_names: serializeArray(manifest.appliedTypeNames),
        proposed_type_names: serializeArray(manifest.proposedTypes.map((item) => item.name)),
        skills_used: serializeArray(manifest.skillsUsed),
    });
    return { status, pageId: resultPageId };
}
|
|
303
|
+
/**
 * True only when the runner explicitly opts in to inline retries via a
 * strict-boolean `inlineRetryCapable` flag.
 */
function isInlineRetryCapable(runner) {
    const capable = runner.inlineRetryCapable;
    return capable === true;
}
|
|
306
|
+
/**
 * Extract a trimmed, non-empty message from a string or Error value.
 * @returns the trimmed text, or null when the value is neither a string nor
 *   an Error, or trims to "".
 */
function normalizeWorkflowErrorDetail(value) {
    let text = null;
    if (typeof value === "string") {
        text = value;
    }
    else if (value instanceof Error) {
        text = value.message;
    }
    if (text === null) {
        return null;
    }
    const trimmed = text.trim();
    return trimmed === "" ? null : trimmed;
}
|
|
317
|
+
/**
 * Build a human-readable message for a workflow failure, appending AppError
 * detail text when it adds information beyond the base message.
 *
 * Precedence: a string/Error `details` payload wins; otherwise an
 * object-shaped `details` carrying a `cause` field is used; duplicates of the
 * base message are suppressed.
 */
function formatWorkflowError(error) {
    const message = error instanceof Error ? error.message : String(error);
    if (!(error instanceof AppError)) {
        return message;
    }
    const directDetails = normalizeWorkflowErrorDetail(error.details);
    if (directDetails && directDetails !== message) {
        return `${message}: ${directDetails}`;
    }
    const details = error.details;
    if (!details || typeof details !== "object" || Array.isArray(details)) {
        return message;
    }
    const cause = normalizeWorkflowErrorDetail(details.cause);
    if (!cause || cause === message) {
        return message;
    }
    return `${message}: ${cause}`;
}
|
|
335
|
+
/**
 * Build the runtime AppError raised when a workflow phase exceeds its time
 * budget; the timeout is reported in whole seconds (rounded up).
 */
function createWorkflowTimeoutError(phase, timeoutMs) {
    const seconds = Math.ceil(timeoutMs / 1000);
    const message = `Workflow ${phase} timed out after ${seconds}s`;
    return new AppError(message, "runtime", { phase, timeoutMs });
}
|
|
341
|
+
/**
 * Race `run()` against a timeout. When the timer fires, the shared
 * AbortController is aborted (so the underlying workflow can cancel itself)
 * and the timeout error is thrown; the timer is always cleared in `finally`.
 *
 * @param phase - label used in the timeout error message.
 * @param timeoutMs - budget in milliseconds.
 * @param controller - AbortController shared with the running workflow.
 * @param run - async thunk performing the actual work.
 */
async function runWithWorkflowTimeout(phase, timeoutMs, controller, run) {
    let timedOut = false;
    // Created eagerly so abort() and reject() deliver the same instance.
    const timeoutError = createWorkflowTimeoutError(phase, timeoutMs);
    let timeoutHandle = null;
    try {
        return await Promise.race([
            run(),
            new Promise((_, reject) => {
                timeoutHandle = setTimeout(() => {
                    timedOut = true;
                    controller.abort(timeoutError);
                    reject(timeoutError);
                }, timeoutMs);
            }),
        ]);
    }
    catch (error) {
        // If the timer fired, surface the timeout error even when run()
        // rejected with a different (e.g. abort-induced) error.
        if (timedOut) {
            throw timeoutError;
        }
        throw error;
    }
    finally {
        if (timeoutHandle) {
            clearTimeout(timeoutHandle);
        }
    }
}
|
|
369
|
+
// A runner failure can happen after the agent has already written a final result.json.
// Recover that manifest instead of blindly re-injecting the same task.
function readRecoverableWorkflowResult(resultPath, expectedThreadId) {
    if (!resultPath || !expectedThreadId) {
        return null;
    }
    try {
        const manifest = readWorkflowResult(resultPath);
        const matchesThread = manifest.threadId === expectedThreadId;
        // A manifest from another thread, or one that itself reports an
        // error, is not a usable outcome.
        if (!matchesThread || manifest.status === "error") {
            return null;
        }
        return manifest;
    }
    catch {
        // Missing or corrupt result file: nothing to recover.
        return null;
    }
}
|
|
386
|
+
/**
 * Decide whether a runner failure might still have produced a usable result
 * manifest. Config errors never produce results; otherwise only failures
 * whose messages carry the runner/result-phase prefixes qualify.
 */
function shouldAttemptManifestRecovery(error) {
    if (error instanceof AppError && error.type === "config") {
        return false;
    }
    const message = error instanceof Error ? error.message : String(error);
    const recoverablePrefixes = ["Codex workflow ", "Workflow result "];
    return recoverablePrefixes.some((prefix) => message.startsWith(prefix));
}
|
|
393
|
+
/**
 * Retry while attempts remain, except for config errors — retrying cannot
 * fix a misconfiguration.
 */
function shouldRetryWorkflowAttempt(error, attempt, maxAttempts) {
    const attemptsExhausted = attempt >= maxAttempts;
    const isConfigError = error instanceof AppError && error.type === "config";
    return !attemptsExhausted && !isConfigError;
}
|
|
402
|
+
/**
 * Read the threadId persisted in a queue-item JSON file.
 * @returns the trimmed thread id, or null when the file is unreadable,
 *   empty, malformed JSON, or has no non-blank string threadId.
 */
function readPersistedWorkflowThreadId(queueItemPath) {
    try {
        const raw = readTextFileSync(queueItemPath).trim();
        if (!raw) {
            return null;
        }
        const { threadId } = JSON.parse(raw);
        if (typeof threadId !== "string") {
            return null;
        }
        const trimmed = threadId.trim();
        return trimmed ? trimmed : null;
    }
    catch {
        // Unreadable or unparsable file: treat as "no persisted thread".
        return null;
    }
}
|
|
415
|
+
/**
 * Mark a queue row as errored after a failed workflow attempt.
 *
 * Increments attempts, truncates the stored error message to 1000 chars, and
 * keeps any previously recorded thread id / manifest path when the payload
 * does not supply new ones (COALESCE).
 *
 * @param payload - { errorMessage, threadId?, resultManifestPath? }
 */
function updateQueueWorkflowError(db, fileId, payload) {
    const processedAt = toOffsetIso();
    db.prepare(`
    UPDATE vault_processing_queue
    SET
      status = 'error',
      processed_at = @processed_at,
      error_message = @error_message,
      attempts = attempts + 1,
      started_at = COALESCE(started_at, @processed_at),
      thread_id = COALESCE(@thread_id, thread_id),
      workflow_version = @workflow_version,
      result_manifest_path = COALESCE(@result_manifest_path, result_manifest_path),
      last_error_at = @last_error_at
    WHERE file_id = @file_id
  `).run({
        file_id: fileId,
        processed_at: processedAt,
        // Cap message length so large stack traces don't bloat the DB.
        error_message: payload.errorMessage.slice(0, 1_000),
        thread_id: payload.threadId ?? null,
        workflow_version: CODEX_WORKFLOW_VERSION,
        result_manifest_path: payload.resultManifestPath ?? null,
        last_error_at: processedAt,
    });
}
|
|
440
|
+
/**
 * Build the prompt and on-disk artifact set for a Codex workflow run, then
 * return the runner input describing where everything lives.
 *
 * @param paths - resolved runtime paths (wikiRoot, wikiPath, vaultPath, packageRoot, ...).
 * @param item - the claimed queue item being processed.
 * @param file - the vault_files index row for the item.
 * @param localFilePath - local path of the (possibly fetched) vault file.
 * @param env - process environment; WIKI_AGENT_MODEL overrides the model.
 * @param allowTemplateEvolution - whether the agent may evolve templates.
 * @returns {{artifacts: object, input: object}} artifact paths plus the runner input.
 */
function prepareCodexWorkflowInput(paths, item, file, localFilePath, env, allowTemplateEvolution) {
    // The workspace root is the parent directory of the wiki root.
    const workspaceRoot = path.resolve(paths.wikiRoot, "..");
    const artifacts = getWorkflowArtifactSet(paths, item.fileId);
    const promptText = buildVaultWorkflowPrompt({
        workspaceRoot,
        vaultFilePath: localFilePath,
        resultJsonPath: artifacts.resultPath,
        allowTemplateEvolution,
    });
    // Materialize the prompt and queue-item JSON on disk so an external
    // runner process can pick them up.
    ensureWorkflowArtifactSet(paths, {
        queueItemId: item.fileId,
        queueItem: {
            fileId: item.fileId,
            threadId: item.threadId ?? null,
            workspaceRoot,
            wikiRoot: paths.wikiRoot,
            wikiPath: paths.wikiPath,
            vaultPath: paths.vaultPath,
            localFilePath,
            resultJsonPath: artifacts.resultPath,
            skillArtifactsPath: artifacts.skillArtifactsPath,
            file,
            queue: {
                status: item.status,
                priority: item.priority,
                queuedAt: item.queuedAt,
                attempts: item.attempts,
            },
        },
        promptMarkdown: promptText,
    });
    return {
        artifacts,
        input: {
            queueItemId: item.fileId,
            workspaceRoot,
            packageRoot: paths.packageRoot,
            promptPath: artifacts.promptPath,
            promptText,
            queueItemPath: artifacts.queueItemPath,
            resultPath: artifacts.resultPath,
            skillArtifactsPath: artifacts.skillArtifactsPath,
            model: env.WIKI_AGENT_MODEL ?? null,
            env,
        },
    };
}
|
|
487
|
+
/**
 * Return queue items (optionally filtered by status) together with
 * per-status totals across the whole queue. Opens the database for the
 * duration of the call and always closes it.
 *
 * @param env - process environment used to resolve paths and DB settings.
 * @param status - optional queue status to filter the item list by.
 */
export function getVaultQueueSnapshot(env = process.env, status) {
    const paths = resolveRuntimePaths(env);
    const config = loadConfig(paths.configPath);
    // Embedding dimension falls back to 384 when unset or unparsable.
    const { db } = openDb(paths.dbPath, config, Number.parseInt(env.EMBEDDING_DIMENSIONS ?? "384", 10) || 384);
    try {
        const items = fetchQueueItemsByStatus(db, status);
        const counts = db.prepare(`
      SELECT
        SUM(CASE WHEN status = 'pending' THEN 1 ELSE 0 END) AS totalPending,
        SUM(CASE WHEN status = 'processing' THEN 1 ELSE 0 END) AS totalProcessing,
        SUM(CASE WHEN status = 'done' THEN 1 ELSE 0 END) AS totalDone,
        SUM(CASE WHEN status = 'skipped' THEN 1 ELSE 0 END) AS totalSkipped,
        SUM(CASE WHEN status = 'error' THEN 1 ELSE 0 END) AS totalError
      FROM vault_processing_queue
    `).get();
        return {
            items,
            // SUM over an empty table yields NULL; coerce to 0.
            totalPending: counts.totalPending ?? 0,
            totalProcessing: counts.totalProcessing ?? 0,
            totalDone: counts.totalDone ?? 0,
            totalSkipped: counts.totalSkipped ?? 0,
            totalError: counts.totalError ?? 0,
        };
    }
    finally {
        db.close();
    }
}
|
|
515
|
+
/**
 * Look up a single queue item by vault file id.
 * Opens the database for the call and always closes it.
 * @returns the queue item, or null when the file is not queued.
 */
export function getVaultQueueItem(env = process.env, fileId) {
    const paths = resolveRuntimePaths(env);
    const config = loadConfig(paths.configPath);
    // Embedding dimension falls back to 384 when unset or unparsable.
    const { db } = openDb(paths.dbPath, config, Number.parseInt(env.EMBEDDING_DIMENSIONS ?? "384", 10) || 384);
    try {
        return fetchQueueItemByFileId(db, fileId);
    }
    finally {
        db.close();
    }
}
|
|
526
|
+
/**
 * Claims a batch of vault queue items and runs the agent workflow for each one.
 *
 * For every claimed item this: loads the vault file, prepares workflow input,
 * runs the workflow (resuming an existing thread when one is recorded) with a
 * timeout and optional inline retries, applies the resulting manifest to the
 * database, and records a per-item outcome. Two recovery layers exist: inside
 * the attempt loop (retry or recover a persisted result manifest) and in the
 * outer catch (recover after a terminal failure). The DB handle is always
 * closed via finally.
 *
 * @param {NodeJS.ProcessEnv} [env] - Environment for settings, paths and DB.
 * @param {object} [options] - Optional overrides:
 *   maxItems (batch size cap), filterFileIds (restrict claimed ids),
 *   workflowRunner (injectable runner, e.g. for tests), log (line logger).
 * @returns {Promise<object>} Summary: { enabled, processed, done, skipped,
 *   errored, items } where items holds per-file outcome records.
 */
export async function processVaultQueueBatch(env = process.env, options = {}) {
  const agentSettings = resolveAgentSettings(env, { strict: true });
  // Short-circuit with an "disabled" summary when the agent is off or batching is zeroed out.
  if (!agentSettings.enabled || agentSettings.batchSize === 0) {
    return {
      enabled: false,
      processed: 0,
      done: 0,
      skipped: 0,
      errored: 0,
      items: [],
    };
  }
  const paths = resolveRuntimePaths(env);
  const config = loadConfig(paths.configPath);
  // Embedding dimension falls back to 384 when the env var is unset/invalid.
  const { db } = openDb(paths.dbPath, config, Number.parseInt(env.EMBEDDING_DIMENSIONS ?? "384", 10) || 384);
  try {
    const items = claimQueueItems(db, options.maxItems ?? agentSettings.batchSize, options.filterFileIds);
    const result = {
      enabled: true,
      processed: 0,
      done: 0,
      skipped: 0,
      errored: 0,
      items: [],
    };
    const workflowRunner = options.workflowRunner ?? createDefaultWorkflowRunner(env);
    const templateEvolution = resolveTemplateEvolutionSettings(env);
    // Inline-capable runners get multiple attempts; others get exactly one.
    const maxWorkflowAttempts = isInlineRetryCapable(workflowRunner) ? INLINE_WORKFLOW_ATTEMPTS : 1;
    const workflowTimeoutMs = agentSettings.workflowTimeoutSeconds * 1000;
    if (items.length > 0) {
      options.log?.(`claimed ${items.length} items: ${items.map((item) => item.fileId).join(", ")}`);
    }
    // Tally a single item outcome into the batch counters; every outcome bumps `processed`.
    const countOutcome = (status) => {
      if (status === "done") {
        result.done += 1;
      }
      else if (status === "skipped") {
        result.skipped += 1;
      }
      else if (status === "error") {
        result.errored += 1;
      }
      result.processed += 1;
    };
    for (const item of items) {
      options.log?.(`${item.fileId}: start processing attempt=${item.attempts + 1} queuedAt=${item.queuedAt} thread=${item.threadId ?? "-"}`);
      const file = fetchVaultFile(db, item.fileId);
      // A claimed queue row whose vault file vanished from the index is a hard error.
      if (!file) {
        updateQueueStatus(db, item.fileId, {
          status: "error",
          processedAt: toOffsetIso(),
          errorMessage: `Vault file missing from index: ${item.fileId}`,
          incrementAttempts: true,
        });
        countOutcome("error");
        options.log?.(`${item.fileId}: error thread=- result=- message=Vault file missing from index`);
        result.items.push({
          fileId: item.fileId,
          status: "error",
          reason: "Vault file missing from index",
        });
        continue;
      }
      // Mutable per-item state: thread id may be updated mid-attempt, and the
      // manifest path is captured as soon as input preparation succeeds so the
      // outer catch can attempt recovery.
      let threadId = item.threadId ?? null;
      let resultManifestPath = null;
      try {
        const localFilePath = await ensureLocalVaultFile(file, paths.vaultPath, env);
        const { artifacts, input } = prepareCodexWorkflowInput(paths, item, file, localFilePath, env, templateEvolution.canApply);
        resultManifestPath = artifacts.resultPath;
        let finalOutcome = null;
        let lastWorkflowError;
        for (let attempt = 1; attempt <= maxWorkflowAttempts; attempt += 1) {
          try {
            // Resume an existing workflow thread if we have one; otherwise start fresh.
            const mode = threadId ? "resume" : "start";
            const workflowController = new AbortController();
            // Guards against double-logging/tracking when both the callback
            // and the returned handle report the same thread id.
            let loggedStartedThreadId = null;
            const attemptInput = {
              ...input,
              signal: workflowController.signal,
              // Persist the thread id as soon as the runner reports it, so a
              // crash mid-run can still resume the same thread later.
              onThreadStarted: (startedThreadId) => {
                if (loggedStartedThreadId === startedThreadId) {
                  return;
                }
                loggedStartedThreadId = startedThreadId;
                threadId = startedThreadId;
                updateQueueWorkflowTracking(db, item.fileId, {
                  threadId: startedThreadId,
                  resultManifestPath: artifacts.resultPath,
                });
                options.log?.(`${item.fileId}: workflow started mode=${mode} attempt=${attempt}/${maxWorkflowAttempts} thread=${startedThreadId} result=${artifacts.resultPath}`);
              },
            };
            options.log?.(`${item.fileId}: launching workflow mode=${mode} attempt=${attempt}/${maxWorkflowAttempts} timeout=${agentSettings.workflowTimeoutSeconds}s result=${artifacts.resultPath}`);
            const handle = threadId
              ? await runWithWorkflowTimeout("resumeWorkflow", workflowTimeoutMs, workflowController, () => workflowRunner.resumeWorkflow(threadId, attemptInput))
              : await runWithWorkflowTimeout("startWorkflow", workflowTimeoutMs, workflowController, () => workflowRunner.startWorkflow(attemptInput));
            threadId = handle.threadId;
            // Log the start exactly once even if onThreadStarted never fired.
            if (loggedStartedThreadId !== handle.threadId) {
              loggedStartedThreadId = handle.threadId;
              options.log?.(`${item.fileId}: workflow started mode=${mode} attempt=${attempt}/${maxWorkflowAttempts} thread=${handle.threadId} result=${artifacts.resultPath}`);
            }
            updateQueueWorkflowTracking(db, item.fileId, {
              threadId: handle.threadId,
              resultManifestPath: artifacts.resultPath,
            });
            options.log?.(`${item.fileId}: waiting for workflow result thread=${handle.threadId} attempt=${attempt}/${maxWorkflowAttempts} result=${artifacts.resultPath}`);
            // Result collection gets its own abort controller and its own timeout window.
            const collectController = new AbortController();
            const manifest = await runWithWorkflowTimeout("collectResult", workflowTimeoutMs, collectController, () => workflowRunner.collectResult(handle, {
              ...input,
              signal: collectController.signal,
            }));
            // Reject manifests that propose template changes the settings forbid.
            assertTemplateEvolutionAllowed(manifest, templateEvolution);
            finalOutcome = {
              outcome: applyWorkflowManifest(db, item.fileId, manifest, artifacts.resultPath),
              manifest,
              handleThreadId: handle.threadId,
            };
            break;
          }
          catch (error) {
            lastWorkflowError = error;
            // The runner may have persisted a thread id even though the attempt
            // failed — prefer it over our in-memory value so a retry resumes.
            threadId = readPersistedWorkflowThreadId(artifacts.queueItemPath) ?? threadId;
            if (threadId) {
              updateQueueWorkflowTracking(db, item.fileId, {
                threadId,
                resultManifestPath: artifacts.resultPath,
              });
            }
            // First recovery layer: the workflow may have written its result
            // manifest before failing (e.g. a timeout after completion).
            const recoveredManifest = shouldAttemptManifestRecovery(error)
              ? readRecoverableWorkflowResult(artifacts.resultPath, threadId)
              : null;
            if (recoveredManifest) {
              assertTemplateEvolutionAllowed(recoveredManifest, templateEvolution);
              finalOutcome = {
                outcome: applyWorkflowManifest(db, item.fileId, recoveredManifest, artifacts.resultPath),
                manifest: recoveredManifest,
                handleThreadId: recoveredManifest.threadId,
              };
              options.log?.(`${item.fileId}: recovered persisted workflow result status=${recoveredManifest.status} thread=${recoveredManifest.threadId} ${formatManifestLogFields(recoveredManifest)} result=${artifacts.resultPath} message=${formatWorkflowError(error)}`);
              break;
            }
            // Non-retryable errors (or exhausted attempts) propagate to the outer catch.
            if (!shouldRetryWorkflowAttempt(error, attempt, maxWorkflowAttempts)) {
              throw error;
            }
            options.log?.(`${item.fileId}: retrying workflow attempt ${attempt + 1}/${maxWorkflowAttempts} thread=${threadId ?? "-"} result=${artifacts.resultPath} message=${formatWorkflowError(error)}`);
          }
        }
        // The loop can only exit without an outcome via exhaustion; surface the
        // last error, or a generic runtime error if none was captured.
        if (!finalOutcome) {
          throw (lastWorkflowError ?? new AppError("Workflow completed without a result", "runtime"));
        }
        options.log?.(`${item.fileId}: ${finalOutcome.outcome.status} thread=${finalOutcome.handleThreadId} ${formatManifestLogFields(finalOutcome.manifest)} result=${artifacts.resultPath}`);
        countOutcome(finalOutcome.outcome.status);
        result.items.push({
          fileId: item.fileId,
          status: finalOutcome.outcome.status,
          pageId: finalOutcome.outcome.pageId,
          reason: finalOutcome.manifest.reason,
          threadId: finalOutcome.handleThreadId,
          decision: finalOutcome.manifest.decision,
          skillsUsed: finalOutcome.manifest.skillsUsed,
          createdPageIds: finalOutcome.manifest.createdPageIds,
          updatedPageIds: finalOutcome.manifest.updatedPageIds,
          proposedTypeNames: finalOutcome.manifest.proposedTypes.map((entry) => entry.name),
          resultManifestPath: artifacts.resultPath,
        });
      }
      catch (error) {
        // Second recovery layer: a terminal failure may still have left a
        // usable result manifest on disk from before the failure.
        const recoveredManifest = shouldAttemptManifestRecovery(error)
          ? readRecoverableWorkflowResult(resultManifestPath, threadId)
          : null;
        if (recoveredManifest && resultManifestPath) {
          assertTemplateEvolutionAllowed(recoveredManifest, templateEvolution);
          const recoveredOutcome = applyWorkflowManifest(db, item.fileId, recoveredManifest, resultManifestPath);
          options.log?.(`${item.fileId}: recovered persisted workflow result after terminal failure status=${recoveredOutcome.status} thread=${recoveredManifest.threadId} ${formatManifestLogFields(recoveredManifest)} result=${resultManifestPath} message=${formatWorkflowError(error)}`);
          countOutcome(recoveredOutcome.status);
          result.items.push({
            fileId: item.fileId,
            status: recoveredOutcome.status,
            pageId: recoveredOutcome.pageId,
            reason: recoveredManifest.reason,
            threadId: recoveredManifest.threadId,
            decision: recoveredManifest.decision,
            skillsUsed: recoveredManifest.skillsUsed,
            createdPageIds: recoveredManifest.createdPageIds,
            updatedPageIds: recoveredManifest.updatedPageIds,
            proposedTypeNames: recoveredManifest.proposedTypes.map((entry) => entry.name),
            resultManifestPath,
          });
          continue;
        }
        // No recovery possible: record the error on the queue row and in the summary.
        const message = formatWorkflowError(error);
        updateQueueWorkflowError(db, item.fileId, {
          errorMessage: message,
          threadId,
          resultManifestPath,
        });
        options.log?.(`${item.fileId}: error thread=${threadId ?? "-"} result=${resultManifestPath ?? "-"} message=${message}`);
        countOutcome("error");
        result.items.push({
          fileId: item.fileId,
          status: "error",
          pageId: item.resultPageId ?? null,
          reason: message,
          threadId,
          resultManifestPath,
        });
      }
    }
    return result;
  }
  finally {
    db.close();
  }
}
|
|
740
|
+
/**
 * Reports the currently resolved wiki agent settings (non-strict resolution).
 *
 * @param {NodeJS.ProcessEnv} [env] - Environment to resolve settings from.
 * @returns {*} The settings object produced by resolveAgentSettings.
 */
export function getWikiAgentStatus(env = process.env) {
  const settings = resolveAgentSettings(env);
  return settings;
}
|