@rubytech/create-realagent 1.0.823 → 1.0.824
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/payload/platform/neo4j/migrations/006-prune-bogus-whatsapp-persons.ts +132 -0
- package/payload/platform/plugins/admin/hooks/__tests__/archive-ingest-surface-gate.test.sh +1 -1
- package/payload/platform/plugins/admin/hooks/archive-ingest-surface-gate.sh +2 -2
- package/payload/platform/plugins/docs/references/plugins-guide.md +1 -1
- package/payload/platform/plugins/memory/mcp/dist/tools/memory-archive-write.d.ts.map +1 -1
- package/payload/platform/plugins/memory/mcp/dist/tools/memory-archive-write.js +10 -5
- package/payload/platform/plugins/memory/mcp/dist/tools/memory-archive-write.js.map +1 -1
- package/payload/platform/plugins/memory/mcp/dist/tools/whatsapp-export-insight-pass.d.ts +4 -0
- package/payload/platform/plugins/memory/mcp/dist/tools/whatsapp-export-insight-pass.d.ts.map +1 -1
- package/payload/platform/plugins/memory/mcp/dist/tools/whatsapp-export-insight-pass.js +106 -30
- package/payload/platform/plugins/memory/mcp/dist/tools/whatsapp-export-insight-pass.js.map +1 -1
- package/payload/platform/plugins/whatsapp-import/bin/ingest.mjs +174 -115
- package/payload/platform/plugins/whatsapp-import/bin/whatsapp-ingest.sh +18 -7
- package/payload/platform/plugins/whatsapp-import/lib/dist/parse-export.js +24 -0
- package/payload/platform/plugins/whatsapp-import/lib/dist/parse-export.js.map +1 -1
- package/payload/platform/plugins/whatsapp-import/lib/src/__tests__/filter-gate.test.ts +2 -0
- package/payload/platform/plugins/whatsapp-import/lib/src/__tests__/parse-export-lrm.test.ts +83 -0
- package/payload/platform/plugins/whatsapp-import/lib/src/parse-export.ts +25 -0
- package/payload/platform/plugins/whatsapp-import/skills/whatsapp-import/SKILL.md +11 -9
- package/payload/platform/templates/specialists/agents/database-operator.md +1 -1
- package/payload/server/chunk-JNVZQMTZ.js +593 -0
- package/payload/server/chunk-L3T7ECLI.js +1116 -0
- package/payload/server/chunk-LFOR2ACU.js +10079 -0
- package/payload/server/chunk-NAP6XMLW.js +2233 -0
- package/payload/server/client-pool-M2DU74ZP.js +32 -0
- package/payload/server/cloudflare-task-tracker-6S23B7QX.js +17 -0
- package/payload/server/maxy-edge.js +3 -3
- package/payload/server/neo4j-migrations-YED5CFPF.js +428 -0
- package/payload/server/public/assets/{admin-Bnj-1qCb.js → admin-DOkUspG1.js} +1 -1
- package/payload/server/public/index.html +1 -1
- package/payload/server/server.js +120 -7
@@ -1,21 +1,22 @@
 ---
 name: whatsapp-import
-description: Phase 1 of the WhatsApp `_chat.txt` ingest contract — deterministic, LLM-free. Preview the archive (parsed counts, date range, sender histogram), ask the operator to choose a filter (`all`, `senders=<csv>`, `date-range=<isoFrom>..<isoTo>`), then write Conversation + Messages + NEXT chain
+description: Phase 1 of the WhatsApp `_chat.txt` ingest contract — deterministic, LLM-free. Preview the archive (parsed counts, date range, sender histogram), confirm the owner + third-party `:Person`, ask the operator to choose a filter (`all`, `senders=<csv>`, `date-range=<isoFrom>..<isoTo>`), then write Conversation + Messages + NEXT chain via the single Bash entry `whatsapp-ingest.sh`. The writer binds participants to the owner + subject pair: any parsed senderName outside that closed set LOUD-FAILs (Task 887 §A0). NO observations and NO LLM at this phase — semantic enrichment lives in the `whatsapp-import-enrich` skill (Phase 2). Triggers when the user asks to import a WhatsApp chat, ingest a `_chat.txt` file, or drops the contents of an "Export Chat" folder into chat. Distinct from the live `whatsapp` plugin (Baileys); this is import-from-export only.
 ---
 
 # WhatsApp Import — Phase 1 (Load)
 
 Phase 1 of the two-phase WhatsApp ingest contract. Deterministic only: parse → preview → operator-supplied filter → archive-write. NO LLM is invoked at this phase. The chunked Haiku insight pass moved to Phase 2 (`whatsapp-import-enrich` skill) so one ingest cannot blow the operator's context window with `:Observation` enumeration prose.
 
-## Owner confirmation (mandatory first step)
+## Owner + subject confirmation (mandatory first step)
 
-A WhatsApp
+A WhatsApp DM has exactly two participants. The **owner** is the operator who exported the `_chat.txt`; the **subject** is the third party in the conversation. Both must resolve to existing graph nodes (`:AdminUser` or `:Person`) before the script runs — Task 887 §A0 closes the auto-Person leak by binding the writer to that closed pair.
 
-1. List every `:AdminUser`
+1. List every `:AdminUser` and the senders surfaced by Step 1 preview via `mcp__graph__maxy-graph-read_neo4j_cypher`:
 `MATCH (u:AdminUser) RETURN elementId(u) AS elementId, u.name AS name, u.userId AS userId, u.accountId AS accountId`
 2. Ask the operator: "Who exported this `_chat.txt`?" — accept either an existing `:AdminUser` elementId or, if the operator names someone not in the graph, surface that as a blocker (auto-creating an unknown owner is refused).
-3.
-4.
+3. Identify the third party from the preview's sender histogram. Look up the matching `:Person` (by name); if no match, ask the operator to confirm a `:Person` elementId or block until one exists. **Auto-creating the third-party `:Person` is forbidden** — the operator must confirm the canonical node.
+4. Echo both back verbatim and require explicit yes/no confirmation.
+5. Persist the owner's `elementId` as `--owner-element-id` and the subject's as `--subject-person-id`.
 
 ## Step 1 — preview (mandatory before any write)
 
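A possible shape for the step-3 subject lookup, assuming the `:Person` node carries a `name` property and a simple case/whitespace normalisation (the skill itself only says "look up the matching `:Person` (by name)"):

```ts
// Sketch only: the property name (p.name) and the normalisation used here are assumptions.
const subjectLookupCypher = `
  MATCH (p:Person)
  WHERE toLower(trim(p.name)) = toLower(trim($senderName))
  RETURN elementId(p) AS elementId, p.name AS name
`;
```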
@@ -53,6 +54,7 @@ Single Bash call:
 ```bash
 bash platform/plugins/whatsapp-import/bin/whatsapp-ingest.sh <archive.zip|dir|_chat.txt> \
 --owner-element-id <id> \
+--subject-person-id <id> \
 --scope <admin|public> \
 --filter <all|senders=<csv>|date-range=<isoFrom>..<isoTo>>
 ```
@@ -64,9 +66,9 @@ Optional flags:
 
 The script:
 - Unzips the archive if needed; locates `_chat.txt`.
-- Parses the file deterministically (year shape, sender/body grammar, timezone offset).
+- Parses the file deterministically (year shape, sender/body grammar, timezone offset, U+200E/U+200F leading-bidi-strip).
 - Applies the operator-supplied filter to `parseResult.parsedLines` BEFORE archive-write.
--
+- Validates every distinct parsed senderName against the canonical-name candidates of `{owner, subject}` (NFKC-trim-lower normalisation). Any miss LOUD-FAILs with `[whatsapp-ingest] FAIL parser-miss reason="senderName=<verbatim> not in preview histogram (parser failure — re-export or report)"` and exits non-zero. **Never auto-creates a participant** — Task 887 §A0 deleted that fallback path.
 - Writes the Conversation + Messages + edges + NEXT chronology via `memoryArchiveWrite` directly (no MCP envelope between steps).
 
 NO insight pass runs. The `--no-insight` flag of older releases is gone — Phase 1 always means parse + filter + archive-write, nothing else.
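A minimal sketch of the sender gate described in the list above, assuming NFKC-trim-lower normalisation over the owner and subject name candidates; the function and variable names are illustrative, not the identifiers actually used in `ingest.mjs`:

```ts
// Illustrative sketch of the closed-set sender gate; names here are assumptions.
function normalizeName(name: string): string {
  return name.normalize("NFKC").trim().toLowerCase();
}

function assertSendersInClosedSet(
  senderNames: Iterable<string>,
  ownerCandidates: string[],
  subjectCandidates: string[],
): void {
  const allowed = new Set([...ownerCandidates, ...subjectCandidates].map(normalizeName));
  for (const sender of senderNames) {
    if (!allowed.has(normalizeName(sender))) {
      // LOUD-FAIL with the verbatim sender and a non-zero exit; never auto-create a participant.
      console.error(
        `[whatsapp-ingest] FAIL parser-miss reason="senderName=${sender} not in preview histogram (parser failure — re-export or report)"`,
      );
      process.exit(1);
    }
  }
}
```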
@@ -87,7 +89,7 @@ Stdout JSON shape (success — full diagnostic counters per Task 871 success cri
 "messagesAlreadyExisted": 0,
 "nextEdgesProcessed": 1706,
 "nextEdgesCreated": 1706,
-"participantsAlreadyExisted":
+"participantsAlreadyExisted": 2,
 "ms": 6800
 }
 ```
@@ -123,7 +123,7 @@ Per-source archive imports keep their own skill because their CSVs already encod
 - **Phase 1 — preview-then-filtered-write** (`whatsapp-import` skill). Phase 1 is LLM-FREE. Three steps:
 1. **Preview** via `mcp__memory__whatsapp-export-preview` — read-only parse that returns `{conversationSha256, parsed, mediaSkipped, systemSkipped, dateRange:{first,last}, senders:[{name,messageCount}], totalMessages, archiveBytes}`. No Cypher writes.
 2. **Operator chooses a filter.** Surface the preview to the operator and ask: "Filter to apply: `all`, `senders=<csv>`, or `date-range=<isoFrom>..<isoTo>`?". `--filter` is mandatory — the deterministic Bash entry refuses to write without it (`feedback_compress_at_ingest_for_bulk_archives.md`).
-3. **Archive-write** via `bash platform/plugins/whatsapp-import/bin/whatsapp-ingest.sh <archive> --owner-element-id <id> --scope <admin|public> --filter <chosen>`. Parses, applies the filter, writes Conversation + Messages with chronological NEXT chain,
+3. **Archive-write** via `bash platform/plugins/whatsapp-import/bin/whatsapp-ingest.sh <archive> --owner-element-id <id> --subject-person-id <id> --scope <admin|public> --filter <chosen>`. Parses, applies the filter, writes Conversation + Messages with chronological NEXT chain. Writer is bound to the `{owner, subject}` pair from the preview histogram — any parsed senderName outside that closed set LOUD-FAILs (Task 887 §A0); the script does NOT auto-create participant `:Person` nodes. ZERO `:Observation` writes — the LLM insight pass moved to Phase 2.
 
 Phase 1 agent-return is COUNTERS ONLY — no inline enumeration of mention/task/preference counts, no multi-paragraph "ask to enrich" prose. Surface as one chat message: the JSON shape `{conversationElementId, conversationId, parsed, written, alreadyExisted, nextEdgesCreated, ms}` plus one sentence: "Preview before any future re-import via `mcp__memory__whatsapp-export-preview`; enrich semantically when ready via the `whatsapp-import-enrich` skill." The legacy `mcp__memory__whatsapp-export-parse` / `whatsapp-export-insight-write` / `memory-archive-write{archiveType:whatsapp-export}` MCP tools remain blocked at the harness; the Bash script is the only supported archive-write invocation. SKILL: `platform/plugins/whatsapp-import/skills/whatsapp-import/SKILL.md`.
 - **Phase 2 — enrich** (`whatsapp-import-enrich` skill). Operator-triggered ("enrich the X chat"). First runs `mcp__memory__whatsapp-export-insight-pass` against the already-loaded Conversation (chunkSize=50, overlap=5, server-side `confidence>=0.8` gate) to lay down `:Observation {observationStatus:'auto-extracted'}` rows. Then walks the auto-created participants and auto-extracted observations, surfacing evidence per row, and writes operator-confirmed wiring (`apoc.refactor.mergeNodes` for participant promotion/merge, `:MENTIONS` and `:RELATED_TO` edges with `evidenceSnippet`/`evidenceMessageIds`, `:Task` via `mcp__tasks__task-create`, `:Preference` via `memory-write`). Idempotent — re-running surfaces only items still in `auto-created`/`auto-extracted` state. SKILL: `platform/plugins/whatsapp-import/skills/whatsapp-import-enrich/SKILL.md`.
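The preview shape quoted in step 1 above can be read as the following record; the field names come from that text, while the TypeScript types are assumptions for illustration:

```ts
// Field names from the quoted preview shape; types are assumptions.
interface WhatsappExportPreview {
  conversationSha256: string;
  parsed: number;
  mediaSkipped: number;
  systemSkipped: number;
  dateRange: { first: string; last: string };
  senders: Array<{ name: string; messageCount: number }>;
  totalMessages: number;
  archiveBytes: number;
}
```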
@@ -0,0 +1,593 @@
+import {
+__commonJS,
+__toESM,
+getSession
+} from "./chunk-NAP6XMLW.js";
+
+// ../lib/graph-write/dist/audit.js
+var require_audit = __commonJS({
+"../lib/graph-write/dist/audit.js"(exports) {
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.auditCypherWrite = auditCypherWrite;
+exports.formatAuditLine = formatAuditLine;
+var EDGE_PATTERN = /\[[^\]]*?:([A-Z_][A-Za-z0-9_]*(?:\|[A-Z_][A-Za-z0-9_]*)*)[^\]]*?\]/g;
+var CREATE_OR_MERGE_NODE = /\b(?:CREATE|MERGE)\s*\(\s*[A-Za-z_][A-Za-z0-9_]*\s*:\s*[A-Z]/g;
+var PROVENANCE_TOKEN = /\bcreatedBy(?:Agent|Tool|Session|Source)\b/g;
+function stripStringLiterals(cypher) {
+return cypher.replace(/'[^']*'|"[^"]*"/g, '""');
+}
+function extractEdgeTypes(cleaned) {
+const out = /* @__PURE__ */ new Set();
+for (const m of cleaned.matchAll(EDGE_PATTERN)) {
+for (const t of m[1].split("|")) {
+const clean = t.trim();
+if (clean)
+out.add(clean);
+}
+}
+return out;
+}
+function countCreateOrMergeNodes(cleaned) {
+const matches = cleaned.match(CREATE_OR_MERGE_NODE);
+return matches ? matches.length : 0;
+}
+function countProvenanceStamps(cleaned) {
+const matches = cleaned.match(PROVENANCE_TOKEN);
+return matches ? matches.length : 0;
+}
+function auditCypherWrite(input) {
+const warnings = [];
+const cleaned = stripStringLiterals(input.cypher);
+const referencedTypes = extractEdgeTypes(cleaned);
+for (const t of referencedTypes) {
+if (!input.schema.relationshipTypes.has(t)) {
+warnings.push({ kind: "unknown-type-warning", type: t });
+}
+}
+if (input.nodesCreated > 0) {
+const createOrMergeNodes = countCreateOrMergeNodes(cleaned);
+if (createOrMergeNodes > 0) {
+const stamps = countProvenanceStamps(cleaned);
+if (stamps < createOrMergeNodes) {
+warnings.push({
+kind: "missing-provenance-warning",
+created: createOrMergeNodes,
+stamped: stamps
+});
+}
+}
+}
+if (input.orphanIds.length > 0) {
+warnings.push({ kind: "orphan-warning", orphanIds: input.orphanIds });
+}
+return warnings;
+}
+function formatAuditLine(line) {
+const prefixField = `query="${line.cypherPrefix.replace(/"/g, "'")}"`;
+switch (line.kind) {
+case "accepted":
+return `[graph-cypher-write] accepted ${prefixField} nodesCreated=${line.nodesCreated} relsCreated=${line.relsCreated} agentName=${line.agentName} sessionId=${line.sessionId}`;
+case "orphan-warning":
+return `[graph-cypher-write] orphan-warning ${prefixField} orphanIds=${line.orphanIds.join(",")}`;
+case "unknown-type-warning":
+return `[graph-cypher-write] unknown-type-warning ${prefixField} type=${line.type}`;
+case "missing-provenance-warning":
+return `[graph-cypher-write] missing-provenance-warning ${prefixField} created=${line.created} stamped=${line.stamped}`;
+}
+}
+}
+});
+
+// ../lib/graph-write/dist/index.js
+var require_dist = __commonJS({
+"../lib/graph-write/dist/index.js"(exports) {
+"use strict";
+var __createBinding = exports && exports.__createBinding || (Object.create ? (function(o, m, k, k2) {
+if (k2 === void 0) k2 = k;
+var desc = Object.getOwnPropertyDescriptor(m, k);
+if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+desc = { enumerable: true, get: function() {
+return m[k];
+} };
+}
+Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+if (k2 === void 0) k2 = k;
+o[k2] = m[k];
+}));
+var __exportStar = exports && exports.__exportStar || function(m, exports2) {
+for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports2, p)) __createBinding(exports2, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ACTION_PROVENANCE_LABELS = void 0;
+exports.stampCreatedBy = stampCreatedBy;
+exports.writeNodeWithEdges = writeNodeWithEdges2;
+__exportStar(require_audit(), exports);
+exports.ACTION_PROVENANCE_LABELS = /* @__PURE__ */ new Set([
+"Person",
+"UserProfile",
+"AdminUser",
+"Organization",
+"LocalBusiness",
+"CloudflareTunnel",
+"CloudflareHostname"
+]);
+function requiresActionProvenance(labels) {
+for (const label of labels) {
+if (exports.ACTION_PROVENANCE_LABELS.has(label))
+return true;
+}
+return false;
+}
+function findProducedFromTaskCandidates(relationships) {
+return relationships.filter((r) => r.type === "PRODUCED" && r.direction === "incoming");
+}
+function stampCreatedBy(props, createdBy) {
+return {
+...props,
+createdByAgent: createdBy.agent ?? "unknown",
+createdBySession: createdBy.session ?? "unknown",
+createdByTool: createdBy.tool ?? null,
+createdBySource: createdBy.source ?? null
+};
+}
+async function writeNodeWithEdges2(params) {
+const { session, labels, props, relationships, createdBy } = params;
+const agentLabel = createdBy.agent ?? createdBy.source ?? "unknown";
+const labelCsv = labels.join(",");
+const reviewDigestActionTool = typeof props.actionTool === "string" && props.actionTool === "review-digest-compose";
+if (labels.includes("ReviewAlert") || reviewDigestActionTool) {
+const actionToolField = reviewDigestActionTool ? "review-digest-compose" : "n/a";
+process.stderr.write(`[graph-write] reject reason=removed-feature labels=${labelCsv} actionTool=${actionToolField} agent=${agentLabel}
+`);
+throw new Error("Write doctrine violated: review-detector feature removed (Task 884) \u2014 `:ReviewAlert` and `:Event {actionTool:'review-digest-compose'}` writes are not allowed.");
+}
+if (!relationships || relationships.length < 1) {
+process.stderr.write(`[graph-write] reject reason=zero-relationships labels=${labelCsv} agent=${agentLabel}
+`);
+throw new Error("Write doctrine violated: a node must be created with at least one relationship. See .docs/neo4j.md (Write doctrine).");
+}
+const labelStr = labels.map((l) => `\`${l.replace(/`/g, "")}\``).join(":");
+const nodeProps = stampCreatedBy(props, createdBy);
+return await session.executeWrite(async (tx) => {
+const targetIds = relationships.map((r) => r.targetNodeId);
+const check = await tx.run(`UNWIND $ids AS id MATCH (t) WHERE elementId(t) = id RETURN elementId(t) AS id, labels(t) AS labels`, { ids: targetIds });
+const labelsByTarget = /* @__PURE__ */ new Map();
+for (const rec of check.records) {
+labelsByTarget.set(rec.get("id"), rec.get("labels"));
+}
+const found = labelsByTarget.size;
+const uniqueRequested = new Set(targetIds).size;
+if (found !== uniqueRequested) {
+process.stderr.write(`[graph-write] reject reason=unresolved-target labels=${labelCsv} agent=${agentLabel} requested=${uniqueRequested} found=${found}
+`);
+throw new Error(`Write doctrine violated: ${uniqueRequested - found} of ${uniqueRequested} relationship target(s) did not resolve (elementId mismatch). No node created.`);
+}
+let producedByTaskId = null;
+if (requiresActionProvenance(labels) && (createdBy.agent ?? "") !== "system") {
+const candidates = findProducedFromTaskCandidates(relationships);
+const taskCandidates = candidates.filter((r) => {
+const lbls = labelsByTarget.get(r.targetNodeId);
+return Array.isArray(lbls) && lbls.includes("Task");
+});
+if (taskCandidates.length === 0) {
+process.stderr.write(`[graph-write] reject reason=missing-action-provenance labels=${labelCsv} agent=${agentLabel}
+`);
+throw new Error(`Process provenance doctrine violated: write to ${labelCsv} requires an inbound :PRODUCED edge from a :Task (createdBy.agent='${agentLabel}'). See .docs/neo4j.md (Process provenance doctrine).`);
+}
+producedByTaskId = taskCandidates[0].targetNodeId;
+}
+let nodeRes;
+try {
+nodeRes = await tx.run(`CREATE (n:${labelStr} $props) RETURN elementId(n) AS nodeId, labels(n) AS nodeLabels`, { props: nodeProps });
+} catch (err) {
+const code = err?.code ?? "";
+if (code === "Neo.ClientError.Schema.ConstraintValidationFailed" && labels.includes("UserProfile")) {
+const accountIdProp = nodeProps.accountId;
+const userIdProp = nodeProps.userId;
+const acctSlice = typeof accountIdProp === "string" ? accountIdProp.slice(0, 8) : "unknown";
+const userSlice = typeof userIdProp === "string" ? userIdProp.slice(0, 8) : "unknown";
+process.stderr.write(`[graph-write] reject reason=user-profile-uniqueness-violation accountId=${acctSlice} userId=${userSlice} writer=${agentLabel}
+`);
+}
+throw err;
+}
+const nodeId = nodeRes.records[0].get("nodeId");
+const nodeLabels = nodeRes.records[0].get("nodeLabels");
+let edgesCreated = 0;
+for (const rel of relationships) {
+const type = rel.type.replace(/`/g, "");
+const q = rel.direction === "outgoing" ? `MATCH (a), (b) WHERE elementId(a) = $from AND elementId(b) = $to CREATE (a)-[:\`${type}\`]->(b)` : `MATCH (a), (b) WHERE elementId(a) = $from AND elementId(b) = $to CREATE (b)-[:\`${type}\`]->(a)`;
+const r = await tx.run(q, { from: nodeId, to: rel.targetNodeId });
+const created = r.summary.counters.updates().relationshipsCreated;
+if (created === 0) {
+process.stderr.write(`[graph-write] reject reason=unresolved-target-on-create labels=${labelCsv} agent=${agentLabel} relType=${rel.type} targetId=${rel.targetNodeId}
+`);
+throw new Error(`Write doctrine violated: relationship CREATE to target ${rel.targetNodeId} produced 0 edges (target likely deleted concurrently after pre-check). Transaction rolled back.`);
+}
+edgesCreated += created;
+}
+if (edgesCreated !== relationships.length) {
+process.stderr.write(`[graph-write] reject reason=edge-count-mismatch labels=${labelCsv} agent=${agentLabel} requested=${relationships.length} created=${edgesCreated}
+`);
+throw new Error(`Write doctrine violated: expected ${relationships.length} edges, created ${edgesCreated}. Transaction rolled back.`);
+}
+process.stderr.write(`[graph-write] accepted labels=${labelCsv} edges=${edgesCreated} createdByAgent=${createdBy.agent ?? "unknown"} createdByTool=${createdBy.tool ?? createdBy.source ?? "unknown"} producedByTask=${producedByTaskId ?? "none"}
+`);
+return { nodeId, labels: nodeLabels, edgesCreated };
+});
+}
+}
+});
+
+// app/lib/cloudflare-task-tracker.ts
+import { readFileSync, existsSync } from "fs";
+import { randomUUID } from "crypto";
+var import_dist = __toESM(require_dist(), 1);
+var CREATED_BY_AGENT = "cloudflare-setup-endpoint";
+var TASK_KIND = "cloudflare-tunnel-login";
+async function openCloudflareTask(params) {
+const { accountId, conversationKey, inputsProvided, inputs, messageId } = params;
+const taskId = randomUUID();
+const now = (/* @__PURE__ */ new Date()).toISOString();
+const session = getSession();
+try {
+const conv = await session.run(
+`MATCH (c:Conversation {sessionKey: $conversationKey, accountId: $accountId}) RETURN elementId(c) AS id LIMIT 1`,
+{ conversationKey, accountId }
+);
+if (conv.records.length === 0) {
+throw new Error(
+`cloudflare-task-tracker: no Conversation with sessionKey=${conversationKey.slice(-8)} for accountId \u2014 refusing to create an orphan Task`
+);
+}
+const conversationElementId = conv.records[0].get("id");
+let messageElementId = null;
+let raisedDuringTag = `raisedDuringConversation=${conversationKey.slice(-8)}`;
+if (messageId) {
+const convIdRow = await session.run(
+`MATCH (c:Conversation {sessionKey: $conversationKey, accountId: $accountId}) RETURN c.conversationId AS conversationId LIMIT 1`,
+{ conversationKey, accountId }
+);
+const conversationId = convIdRow.records.length > 0 ? convIdRow.records[0].get("conversationId") : null;
+if (conversationId) {
+const msgRow = await session.run(
+`MATCH (m:Message {messageId: $messageId, accountId: $accountId, conversationId: $conversationId}) RETURN elementId(m) AS id LIMIT 1`,
+{ messageId, accountId, conversationId }
+);
+if (msgRow.records.length > 0) {
+messageElementId = msgRow.records[0].get("id");
+raisedDuringTag = `raisedDuringMessage=${messageId.slice(0, 8)}`;
+}
+}
+}
+const props = {
+taskId,
+accountId,
+name: "Cloudflare tunnel login + setup",
+description: `Deterministic cloudflare setup invoked by /api/admin/cloudflare/setup. inputsProvided=[${inputsProvided.join(", ")}]`,
+status: "running",
+priority: "normal",
+kind: TASK_KIND,
+inputsProvided,
+startedAt: now,
+createdAt: now,
+updatedAt: now
+};
+if (inputs?.adminLabel) props["inputs.adminLabel"] = inputs.adminLabel;
+if (inputs?.adminDomain) props["inputs.adminDomain"] = inputs.adminDomain;
+if (inputs?.publicLabel) props["inputs.publicLabel"] = inputs.publicLabel;
+if (inputs?.publicDomain) props["inputs.publicDomain"] = inputs.publicDomain;
+if (inputs?.apex) props["inputs.apex"] = inputs.apex;
+const relationships = [
+{
+type: "RAISED_DURING",
+direction: "outgoing",
+targetNodeId: messageElementId ?? conversationElementId
+}
+];
+const result = await (0, import_dist.writeNodeWithEdges)({
+session,
+labels: ["Task"],
+props,
+relationships,
+createdBy: {
+agent: CREATED_BY_AGENT,
+session: conversationKey,
+tool: "cloudflare-setup-endpoint"
+}
+});
+process.stderr.write(
+`[task] action-start kind=${TASK_KIND} taskId=${taskId} ${raisedDuringTag}
+`
+);
+return { taskId, taskElementId: result.nodeId };
+} finally {
+await session.close();
+}
+}
+async function appendCloudflareSteps(taskId, accountId, streamLogPath) {
+if (!existsSync(streamLogPath)) return [];
+let content;
+try {
+content = readFileSync(streamLogPath, "utf-8");
+} catch {
+return [];
+}
+const steps = [];
+for (const line of content.split(/\r?\n/)) {
+const m = line.match(/\bphase_line\s+setup-tunnel\s+step=(\S+)/);
+if (m) steps.push(m[1]);
+}
+if (steps.length === 0) return [];
+const session = getSession();
+try {
+await session.run(
+`MATCH (t:Task {taskId: $taskId, accountId: $accountId})
+SET t.steps = coalesce(t.steps, []) + $steps,
+t.updatedAt = $updatedAt`,
+{ taskId, accountId, steps, updatedAt: (/* @__PURE__ */ new Date()).toISOString() }
+);
+for (const step of steps) {
+process.stderr.write(
+`[task] action-step kind=${TASK_KIND} taskId=${taskId} step=${step}
+`
+);
+}
+return steps;
+} finally {
+await session.close();
+}
+}
+async function completeCloudflareTask(params) {
+const { taskId, taskElementId, accountId, conversationKey, tunnelId, tunnelName, hostnames, status, errorMessage } = params;
+const now = (/* @__PURE__ */ new Date()).toISOString();
+if (status === "failed" && (!errorMessage || errorMessage.trim().length === 0)) {
+throw new Error(
+"cloudflare-task-tracker: errorMessage is required when status='failed' (Task 885 process-provenance contract)."
+);
+}
+const session = getSession();
+try {
+if (status === "completed" && tunnelId && tunnelName) {
+const conv = await session.run(
+`MATCH (c:Conversation {sessionKey: $conversationKey, accountId: $accountId}) RETURN elementId(c) AS id LIMIT 1`,
+{ conversationKey, accountId }
+);
+const conversationElementId = conv.records.length > 0 ? conv.records[0].get("id") : null;
+const tunnelProps = {
+accountId,
+tunnelId,
+tunnelName,
+createdAt: now,
+updatedAt: now
+};
+const tunnelRels = [
+{ type: "PRODUCED", direction: "incoming", targetNodeId: taskElementId }
+];
+if (conversationElementId) {
+tunnelRels.push({
+type: "RAISED_DURING",
+direction: "outgoing",
+targetNodeId: conversationElementId
+});
+}
+const tunnelWrite = await (0, import_dist.writeNodeWithEdges)({
+session,
+labels: ["CloudflareTunnel"],
+props: tunnelProps,
+relationships: tunnelRels,
+createdBy: {
+agent: CREATED_BY_AGENT,
+session: conversationKey,
+tool: "cloudflare-setup-endpoint"
+}
+});
+for (const h of hostnames ?? []) {
+const hostRels = [
+{ type: "PRODUCED", direction: "incoming", targetNodeId: taskElementId },
+{
+type: "ROUTES_TO",
+direction: "outgoing",
+targetNodeId: tunnelWrite.nodeId
+}
+];
+await (0, import_dist.writeNodeWithEdges)({
+session,
+labels: ["CloudflareHostname"],
+props: {
+accountId,
+hostnameValue: h.hostnameValue,
+tunnelId,
+isApex: h.isApex,
+createdAt: now,
+updatedAt: now
+},
+relationships: hostRels,
+createdBy: {
+agent: CREATED_BY_AGENT,
+session: conversationKey,
+tool: "cloudflare-setup-endpoint"
+}
+});
+}
+}
+const setClauses = [
+"t.status = $status",
+"t.completedAt = $now",
+"t.updatedAt = $now"
+];
+const queryParams = { taskId, accountId, status, now };
+if (status === "failed" && errorMessage) {
+setClauses.push("t.errorMessage = $errorMessage");
+queryParams.errorMessage = errorMessage;
+}
+if (status === "completed") {
+if (tunnelId) {
+setClauses.push("t.tunnelId = $tunnelId");
+queryParams.tunnelId = tunnelId;
+}
+if (tunnelName) {
+setClauses.push("t.tunnelName = $tunnelName");
+queryParams.tunnelName = tunnelName;
+}
+if (hostnames && hostnames.length > 0) {
+setClauses.push("t.hostnames = $hostnamesList");
+queryParams.hostnamesList = hostnames.map((h) => h.hostnameValue);
+}
+}
+const updateRes = await session.run(
+`MATCH (t:Task {taskId: $taskId, accountId: $accountId})
+SET ${setClauses.join(", ")}
+RETURN size(coalesce(t.steps, [])) AS stepsCount, count(t) AS affected`,
+queryParams
+);
+const stepsCount = updateRes.records[0]?.get("stepsCount")?.toNumber?.() ?? 0;
+const affected = updateRes.records[0]?.get("affected")?.toNumber?.() ?? 0;
+if (affected !== 1) {
+throw new Error(
+`cloudflare-task-tracker: completeCloudflareTask MATCH count=${affected} for taskId=${taskId} \u2014 Task missing or accountId mismatch`
+);
+}
+process.stderr.write(
+`[task] action-done kind=${TASK_KIND} taskId=${taskId} status=${status} stepsCount=${stepsCount}
+`
+);
+} finally {
+await session.close();
+}
+}
+function readTunnelState(brandConfigDir) {
+const statePath = `${process.env.HOME ?? ""}/${brandConfigDir}/cloudflared/tunnel.state`;
+if (!existsSync(statePath)) return null;
+try {
+const parsed = JSON.parse(readFileSync(statePath, "utf-8"));
+const tunnelId = typeof parsed.tunnelId === "string" ? parsed.tunnelId : null;
+const tunnelName = typeof parsed.tunnelName === "string" ? parsed.tunnelName : null;
+const domain = typeof parsed.domain === "string" ? parsed.domain : null;
+if (!tunnelId || !tunnelName || !domain) return null;
+return { tunnelId, tunnelName, domain };
+} catch {
+return null;
+}
+}
+var CLOUDFLARE_TASK_DIAGNOSTICS = {
+scriptExitedNonzero: "script-exited-nonzero",
+noTunnelStateOnDisk: "no-tunnel-state-on-disk",
+endpointDiedPreReconcile: "endpoint-died-pre-reconcile"
+};
+var RECONCILE_DELAY_BUDGET_MS = 9e4;
+var RECONCILE_HARD_AGE_MS = 60 * 60 * 1e3;
+async function recoverRunningCloudflareTasks(accountId, brandConfigDir, conversationKeyForState) {
+const session = getSession();
+try {
+const cutoff = new Date(Date.now() - RECONCILE_DELAY_BUDGET_MS).toISOString();
+const stale = await session.run(
+`MATCH (t:Task {accountId: $accountId, kind: $kind, status: 'running'})
+WHERE t.startedAt < $cutoff
+RETURN t.taskId AS taskId, elementId(t) AS taskElementId, t.startedAt AS startedAt
+ORDER BY t.startedAt ASC`,
+{ accountId, kind: TASK_KIND, cutoff }
+);
+const tunnelState = readTunnelState(brandConfigDir);
+const tunnelStateFound = tunnelState !== null;
+const resolved = [];
+for (const record of stale.records) {
+const taskId = record.get("taskId");
+const taskElementId = record.get("taskElementId");
+const startedAt = record.get("startedAt");
+const ageMs = Date.now() - new Date(startedAt).getTime();
+try {
+if (tunnelStateFound && tunnelState) {
+const tunnelExistsRow = await session.run(
+`MATCH (t:Task {taskId: $taskId, accountId: $accountId})-[:PRODUCED]->(tn:CloudflareTunnel {tunnelId: $tunnelId})
+RETURN count(tn) AS existsCount`,
+{ taskId, accountId, tunnelId: tunnelState.tunnelId }
+);
+const existsCountRaw = tunnelExistsRow.records[0]?.get("existsCount");
+const tunnelAlreadyExists = (typeof existsCountRaw === "number" ? existsCountRaw : existsCountRaw?.toNumber?.() ?? 0) > 0;
+if (tunnelAlreadyExists) {
+const now = (/* @__PURE__ */ new Date()).toISOString();
+const closeRes = await session.run(
+`MATCH (t:Task {taskId: $taskId, accountId: $accountId})
+SET t.status = 'completed', t.completedAt = $now, t.updatedAt = $now,
+t.tunnelId = $tunnelId, t.tunnelName = $tunnelName
+RETURN count(t) AS affected`,
+{ taskId, accountId, now, tunnelId: tunnelState.tunnelId, tunnelName: tunnelState.tunnelName }
+);
+const aff = closeRes.records[0]?.get("affected");
+const affN = typeof aff === "number" ? aff : aff?.toNumber?.() ?? 0;
+if (affN !== 1) {
+throw new Error(`reconciler close-out MATCH count=${affN} for taskId=${taskId}`);
+}
+resolved.push({ taskId, resolution: "completed-existing-tunnel" });
+process.stderr.write(
+`[task] action-recover kind=${TASK_KIND} taskId=${taskId} age=${Math.round(ageMs / 1e3)}s tunnel-state-found=true tunnel-already-written=true resolution=completed
+`
+);
+continue;
+}
+const convRow = await session.run(
+`MATCH (t:Task {taskId: $taskId, accountId: $accountId})-[:RAISED_DURING]->(target)
+OPTIONAL MATCH (target)-[:PART_OF]->(c:Conversation)
+RETURN coalesce(c.sessionKey, target.sessionKey) AS sessionKey LIMIT 1`,
+{ taskId, accountId }
+);
+const resolvedConversationKey = convRow.records[0]?.get("sessionKey");
+await completeCloudflareTask({
+taskId,
+taskElementId,
+accountId,
+// Empty string when the edge resolution found no Conversation
+// (rare — would require a malformed Task with no RAISED_DURING).
+// The live close-out will produce a Tunnel without provenance
+// edges in that case; better than silently dropping the close-out.
+conversationKey: resolvedConversationKey ?? conversationKeyForState ?? "",
+tunnelId: tunnelState.tunnelId,
+tunnelName: tunnelState.tunnelName,
+hostnames: void 0,
+status: "completed"
+});
+resolved.push({ taskId, resolution: "completed" });
+process.stderr.write(
+`[task] action-recover kind=${TASK_KIND} taskId=${taskId} age=${Math.round(ageMs / 1e3)}s tunnel-state-found=true tunnel-already-written=false resolution=completed
+`
+);
+} else {
+const diagnostic = ageMs > RECONCILE_HARD_AGE_MS ? CLOUDFLARE_TASK_DIAGNOSTICS.noTunnelStateOnDisk : CLOUDFLARE_TASK_DIAGNOSTICS.endpointDiedPreReconcile;
+await completeCloudflareTask({
+taskId,
+taskElementId,
+accountId,
+conversationKey: conversationKeyForState ?? "",
+status: "failed",
+errorMessage: diagnostic
+});
+resolved.push({ taskId, resolution: `failed-${diagnostic}` });
+process.stderr.write(
+`[task] action-recover kind=${TASK_KIND} taskId=${taskId} age=${Math.round(ageMs / 1e3)}s tunnel-state-found=false resolution=failed errorMessage=${diagnostic}
+`
+);
+}
+} catch (err) {
+process.stderr.write(
+`[task] action-recover kind=${TASK_KIND} taskId=${taskId} resolution=error reason="${err instanceof Error ? err.message : String(err)}"
+`
+);
+}
+}
+return { scanned: stale.records.length, resolved };
+} finally {
+await session.close();
+}
+}
+
+export {
+openCloudflareTask,
+appendCloudflareSteps,
+completeCloudflareTask,
+readTunnelState,
+CLOUDFLARE_TASK_DIAGNOSTICS,
+recoverRunningCloudflareTasks
+};
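As a rough orientation to the write doctrine the bundled `writeNodeWithEdges` enforces (at least one relationship, `createdBy` provenance stamping, and an inbound `:PRODUCED` edge from a `:Task` for labels in `ACTION_PROVENANCE_LABELS`), a caller sketch modelled on `openCloudflareTask` might look like the following; the ids, props, and agent names are placeholders, not values used by the package:

```ts
// Caller sketch only: placeholder ids/props, modelled on openCloudflareTask above.
// The writer rejects calls with zero relationships, and for ACTION_PROVENANCE_LABELS
// (e.g. Person) it also requires an inbound :PRODUCED edge from a :Task.
const graphWrite = __toESM(require_dist(), 1);
const session = getSession();
try {
  const { nodeId, edgesCreated } = await graphWrite.writeNodeWithEdges({
    session,
    labels: ["Person"],
    props: { accountId: "<accountId>", name: "<person name>" },
    relationships: [
      { type: "PRODUCED", direction: "incoming", targetNodeId: "<task elementId>" },
    ],
    createdBy: { agent: "example-agent", session: "<sessionKey>", tool: "example-tool" },
  });
  process.stderr.write(`wrote ${nodeId} with ${edgesCreated} edge(s)\n`);
} finally {
  await session.close();
}
```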