@totalreclaw/totalreclaw 3.3.1-rc.2 → 3.3.1-rc.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70) hide show
  1. package/CHANGELOG.md +330 -0
  2. package/SKILL.md +50 -83
  3. package/api-client.ts +18 -11
  4. package/config.ts +117 -3
  5. package/crypto.ts +10 -2
  6. package/dist/api-client.js +226 -0
  7. package/dist/billing-cache.js +100 -0
  8. package/dist/claims-helper.js +606 -0
  9. package/dist/config.js +280 -0
  10. package/dist/consolidation.js +258 -0
  11. package/dist/contradiction-sync.js +1034 -0
  12. package/dist/crypto.js +138 -0
  13. package/dist/digest-sync.js +361 -0
  14. package/dist/download-ux.js +63 -0
  15. package/dist/embedding.js +86 -0
  16. package/dist/extractor.js +1225 -0
  17. package/dist/first-run.js +103 -0
  18. package/dist/fs-helpers.js +563 -0
  19. package/dist/gateway-url.js +197 -0
  20. package/dist/generate-mnemonic.js +13 -0
  21. package/dist/hot-cache-wrapper.js +101 -0
  22. package/dist/import-adapters/base-adapter.js +64 -0
  23. package/dist/import-adapters/chatgpt-adapter.js +238 -0
  24. package/dist/import-adapters/claude-adapter.js +114 -0
  25. package/dist/import-adapters/gemini-adapter.js +201 -0
  26. package/dist/import-adapters/index.js +26 -0
  27. package/dist/import-adapters/mcp-memory-adapter.js +219 -0
  28. package/dist/import-adapters/mem0-adapter.js +158 -0
  29. package/dist/import-adapters/types.js +1 -0
  30. package/dist/index.js +5348 -0
  31. package/dist/llm-client.js +686 -0
  32. package/dist/llm-profile-reader.js +346 -0
  33. package/dist/lsh.js +62 -0
  34. package/dist/onboarding-cli.js +750 -0
  35. package/dist/pair-cli.js +344 -0
  36. package/dist/pair-crypto.js +359 -0
  37. package/dist/pair-http.js +404 -0
  38. package/dist/pair-page.js +826 -0
  39. package/dist/pair-qr.js +107 -0
  40. package/dist/pair-remote-client.js +410 -0
  41. package/dist/pair-session-store.js +566 -0
  42. package/dist/pin.js +542 -0
  43. package/dist/qa-bug-report.js +301 -0
  44. package/dist/relay-headers.js +44 -0
  45. package/dist/reranker.js +442 -0
  46. package/dist/retype-setscope.js +348 -0
  47. package/dist/semantic-dedup.js +75 -0
  48. package/dist/subgraph-search.js +289 -0
  49. package/dist/subgraph-store.js +694 -0
  50. package/dist/tool-gating.js +58 -0
  51. package/download-ux.ts +91 -0
  52. package/embedding.ts +32 -9
  53. package/fs-helpers.ts +124 -0
  54. package/gateway-url.ts +57 -9
  55. package/index.ts +586 -357
  56. package/llm-client.ts +211 -23
  57. package/lsh.ts +7 -2
  58. package/onboarding-cli.ts +114 -1
  59. package/package.json +19 -5
  60. package/pair-cli.ts +76 -8
  61. package/pair-crypto.ts +34 -24
  62. package/pair-page.ts +28 -17
  63. package/pair-qr.ts +152 -0
  64. package/pair-remote-client.ts +540 -0
  65. package/qa-bug-report.ts +381 -0
  66. package/relay-headers.ts +50 -0
  67. package/reranker.ts +73 -0
  68. package/retype-setscope.ts +12 -0
  69. package/subgraph-search.ts +4 -3
  70. package/subgraph-store.ts +109 -16
package/dist/pin.js ADDED
@@ -0,0 +1,542 @@
1
+ /** Pin/unpin pure operation for OpenClaw plugin — v1.1 taxonomy.
2
+ *
3
+ * As of core 2.1.1 / plugin pin path v1.1 (2026-04-19) the pin/unpin operation
4
+ * emits a canonical v1.1 MemoryClaimV1 JSON blob (schema_version "1.0",
5
+ * `pin_status` additive field) wrapped in the outer protobuf at `version = 4`.
6
+ * The prior behavior — emitting v0 short-key blobs at `version = 3` on the
7
+ * pin path — broke the v1 on-chain contract (RC QA bug #2). v0 blobs continue
8
+ * to be READ correctly (via parseBlobForPin's fall-through), so mixed-version
9
+ * vaults remain uniform from the user's point of view.
10
+ */
11
+ import crypto from 'node:crypto';
12
+ import { createRequire } from 'node:module';
13
+ import { buildV1ClaimBlob, mapTypeToCategory, readV1Blob, } from './claims-helper.js';
14
+ import { findLoserClaimInDecisionLog, maybeWriteFeedbackForPin, } from './contradiction-sync.js';
15
+ import { isValidMemoryType, V0_TO_V1_TYPE } from './extractor.js';
16
+ import { PROTOBUF_VERSION_V4 } from './subgraph-store.js';
17
// Lazily load the WASM core (same pattern as claims-helper.ts) so the module
// behaves under both the OpenClaw runtime (CJS-ish tsx) and bare Node ESM
// used by tests.
const requireWasm = createRequire(import.meta.url);
let _wasm = null;
/** Load `@totalreclaw/core` on first use and memoize the module object. */
function getWasm() {
    _wasm ??= requireWasm('@totalreclaw/core');
    return _wasm;
}
26
/**
 * Encode a FactPayload as the minimal Protobuf wire format via the WASM core.
 *
 * The `version` field is threaded through so callers can opt into
 * `PROTOBUF_VERSION_V4` (Memory Taxonomy v1) for the new-fact write and leave
 * tombstone rows at the default (legacy v3). When omitted, defaults to v1
 * (`PROTOBUF_VERSION_V4`) — pin/unpin is a v1 write path.
 */
function encodeFactProtobufLocal(fact, version = PROTOBUF_VERSION_V4) {
    // Wire-shape object: snake_case keys as expected by the WASM encoder.
    const wire = {
        id: fact.id,
        timestamp: fact.timestamp,
        owner: fact.owner,
        encrypted_blob_hex: fact.encryptedBlob,
        blind_indices: fact.blindIndices,
        decay_score: fact.decayScore,
        source: fact.source,
        content_fp: fact.contentFp,
        agent_id: fact.agentId,
        encrypted_embedding: fact.encryptedEmbedding || null,
        version,
    };
    return Buffer.from(getWasm().encodeFactProtobuf(JSON.stringify(wire)));
}
50
/** Short status codes (v0 blob `st` field) → human-readable status names. */
const SHORT_TO_HUMAN = {
    a: 'active',
    p: 'pinned',
    s: 'superseded',
    r: 'retracted',
    c: 'contradicted',
};
/**
 * Human-readable status names → short codes. Derived from SHORT_TO_HUMAN so
 * the two maps cannot drift apart when a status is added or renamed (the
 * previous hand-written inverse had to be kept in sync manually).
 */
const HUMAN_TO_SHORT = Object.fromEntries(Object.entries(SHORT_TO_HUMAN).map(([short, human]) => [human, short]));
64
/** Parse a decrypted blob into a canonical mutable Claim + current human status. */
export function parseBlobForPin(decrypted) {
    // Shared result shape for every legacy input (plain text or unknown JSON):
    // the short claim object is used for BOTH `source.claim` and `claim`.
    const legacyResult = (text, meta) => {
        const shortClaim = buildCanonicalObjectFromLegacy(text, meta);
        return {
            source: { kind: 'v0', claim: shortClaim },
            claim: shortClaim,
            currentStatus: 'active',
            isLegacy: true,
        };
    };
    let obj;
    try {
        obj = JSON.parse(decrypted);
    }
    catch {
        // Not JSON at all — treat the raw string as legacy plain text.
        return legacyResult(decrypted, {});
    }
    // v1 payload (plugin v3.0.0+): long-form fields + schema_version "1.x".
    // Preserve the v1 structure so the pin path can emit v1 on output.
    const looksV1 = typeof obj.text === 'string' &&
        typeof obj.type === 'string' &&
        typeof obj.schema_version === 'string' &&
        obj.schema_version.startsWith('1.');
    if (looksV1) {
        const v1 = readV1Blob(decrypted);
        if (v1) {
            return {
                source: {
                    kind: 'v1',
                    text: v1.text,
                    type: v1.type,
                    source: v1.source,
                    scope: v1.scope,
                    volatility: v1.volatility,
                    reasoning: v1.reasoning,
                    entities: v1.entities,
                    importance: v1.importance,
                    confidence: v1.confidence,
                    createdAt: v1.createdAt,
                    expiresAt: v1.expiresAt,
                    id: v1.id,
                    pinStatus: v1.pinStatus,
                },
                claim: v1ToShortKeyClaim(obj),
                // Current status = pinStatus if present, else active.
                currentStatus: v1.pinStatus === 'pinned' ? 'pinned' : 'active',
                isLegacy: false,
            };
        }
        // readV1Blob returned null — fall through to the v0 checks below.
    }
    // v0 canonical Claim — short keys present.
    if (typeof obj.t === 'string' && typeof obj.c === 'string') {
        const st = typeof obj.st === 'string' ? obj.st : 'a';
        // Deep-clone so callers can mutate the claim without touching `obj`.
        const cloned = JSON.parse(JSON.stringify(obj));
        return {
            source: { kind: 'v0', claim: cloned },
            claim: cloned,
            currentStatus: SHORT_TO_HUMAN[st] ?? 'active',
            isLegacy: false,
        };
    }
    // Legacy {text, metadata: {importance: 0-1}} shape.
    if (typeof obj.text === 'string') {
        return legacyResult(obj.text, obj.metadata ?? {});
    }
    // Unrecognized JSON — fall back to treating the raw string as legacy text.
    return legacyResult(decrypted, {});
}
145
/**
 * Convert a Memory Taxonomy v1 blob object into the short-key shape that the
 * rest of pin.ts manipulates. Pin operations tombstone the existing fact and
 * write a fresh one with the short-key format; the v1 inner blob is not
 * round-tripped through pin (that would require upgrading every downstream
 * read site). Since pin already rewrites the fact with new indices,
 * round-trip fidelity isn't required.
 */
function v1ToShortKeyClaim(v1) {
    const typeStr = typeof v1.type === 'string' ? v1.type : 'claim';
    const rawImportance = typeof v1.importance === 'number' ? v1.importance : 5;
    const out = {
        t: typeof v1.text === 'string' ? v1.text : '',
        // Map the v1 type to the short category key used by the v0 format;
        // unknown types fall back to 'fact'.
        c: isValidMemoryType(typeStr) ? mapTypeToCategory(typeStr) : 'fact',
        cf: typeof v1.confidence === 'number' ? v1.confidence : 0.85,
        // Clamp importance to the 1-10 integer range.
        i: Math.max(1, Math.min(10, Math.round(rawImportance))),
        sa: typeof v1.source === 'string' ? v1.source : 'openclaw-plugin',
        ea: typeof v1.created_at === 'string' ? v1.created_at : new Date().toISOString(),
    };
    if (Array.isArray(v1.entities) && v1.entities.length > 0) {
        // Keep only well-formed entities (object with a non-empty name).
        const shortEntities = [];
        for (const entry of v1.entities) {
            if (!entry || typeof entry !== 'object')
                continue;
            const name = typeof entry.name === 'string' ? entry.name : '';
            if (!name)
                continue;
            const short = {
                n: name,
                tp: typeof entry.type === 'string' ? entry.type : 'concept',
            };
            if (typeof entry.role === 'string' && entry.role.length > 0) {
                short.r = entry.role;
            }
            shortEntities.push(short);
        }
        out.e = shortEntities;
    }
    return out;
}
191
/**
 * Build a v0 short-key claim from legacy `{text, metadata}` input.
 *
 * Phase 2.2.6: uses the single-source-of-truth mapping from claims-helper
 * rather than a local duplicate. Legacy blobs can carry arbitrary strings in
 * `metadata.type`, so the type is validated via `isValidMemoryType` before
 * mapping — unknown types fall back to 'fact'.
 */
function buildCanonicalObjectFromLegacy(text, meta) {
    const typeStr = typeof meta.type === 'string' ? meta.type : 'fact';
    // Legacy importance is a 0-1 float; scale to the 1-10 integer range.
    const impFloat = typeof meta.importance === 'number' ? meta.importance : 0.5;
    return {
        t: text,
        c: isValidMemoryType(typeStr) ? mapTypeToCategory(typeStr) : 'fact',
        cf: 0.85,
        i: Math.max(1, Math.min(10, Math.round(impFloat * 10))),
        sa: typeof meta.source === 'string' ? meta.source : 'openclaw-plugin',
        ea: typeof meta.created_at === 'string' ? meta.created_at : new Date().toISOString(),
    };
}
211
/**
 * Project a source blob (v1 or v0 short-key) into the v1 shape needed by
 * `buildV1ClaimBlob`. For v1 sources this is identity; for v0 sources we
 * upgrade the category / source fields per the spec's legacy-mapping table
 * (`fact|context|decision → claim`, `rule → directive`, `goal → commitment`,
 * etc.). Anything we can't determine falls back to a sensible default so the
 * build call doesn't throw.
 *
 * @param {object} src - Parsed source: `{kind: 'v1', ...}` or `{kind: 'v0', claim}`.
 * @param {string|undefined} defaultSourceAgent - Fallback "source agent"
 *   string used when a v0 claim carries no `sa` field. May be undefined
 *   (callers pass `deps.sourceAgent`); treated as '' for the heuristic below.
 * @returns {object} v1-shaped view: text, type, source, scope?, volatility?,
 *   reasoning?, entities?, importance, confidence.
 */
function projectToV1(src, defaultSourceAgent) {
    if (src.kind === 'v1') {
        // Identity projection — v1 metadata round-trips unchanged.
        return {
            text: src.text,
            type: src.type,
            source: src.source,
            scope: src.scope,
            volatility: src.volatility,
            reasoning: src.reasoning,
            entities: src.entities,
            importance: src.importance,
            confidence: src.confidence,
        };
    }
    // v0 path — upgrade short-key claim to v1.
    const claim = src.claim;
    const text = typeof claim.t === 'string' ? claim.t : '';
    const v0Category = typeof claim.c === 'string' ? claim.c : 'fact';
    // Legacy short category keys back to type names (reverse of TYPE_TO_CATEGORY_V0).
    const V0_CATEGORY_TO_V0_TYPE = {
        fact: 'fact',
        pref: 'preference',
        dec: 'decision',
        epi: 'episodic',
        goal: 'goal',
        ctx: 'context',
        sum: 'summary',
        rule: 'rule',
        ent: 'fact', // entity records don't round-trip as v1 claims; fall back
        dig: 'summary',
        claim: 'claim',
    };
    const v0TypeToken = V0_CATEGORY_TO_V0_TYPE[v0Category] ?? 'fact';
    // Use the shared v0→v1 map for the upgrade.
    const v1Type = V0_TO_V1_TYPE[v0TypeToken] ?? 'claim';
    const importance = typeof claim.i === 'number'
        ? Math.max(1, Math.min(10, Math.round(claim.i)))
        : 5;
    const confidence = typeof claim.cf === 'number' ? claim.cf : 0.85;
    // v0 `sa` isn't a provenance source — it's a "source agent" string like
    // "openclaw-plugin". Map heuristically: if it looks like an agent-style
    // string (contains "plugin"/"agent"/"derived"), mark it as appropriate;
    // otherwise default to "user-inferred" so Tier 1 reranker doesn't give it
    // "user" trust (which would be wrong for legacy blobs with no provenance
    // signal).
    //
    // Fix: `defaultSourceAgent` may itself be undefined (deps.sourceAgent is
    // caller-supplied); coalesce to '' so `.toLowerCase()` below can't throw.
    const sa = typeof claim.sa === 'string' ? claim.sa : (defaultSourceAgent ?? '');
    let v1Source = 'user-inferred';
    const saLower = sa.toLowerCase();
    if (saLower.includes('derived') || saLower.includes('digest') || saLower.includes('consolidat')) {
        v1Source = 'derived';
    }
    else if (saLower.includes('assistant')) {
        v1Source = 'assistant';
    }
    else if (saLower.includes('extern') || saLower.includes('mem0') || saLower.includes('import')) {
        v1Source = 'external';
    }
    // Upgrade short entity records ({n, tp, r?}) to long form ({name, type, role?}),
    // dropping malformed or unnamed entries.
    const entities = Array.isArray(claim.e)
        ? claim.e
            .map((e) => {
            if (!e || typeof e !== 'object')
                return null;
            const entity = e;
            const name = typeof entity.n === 'string' ? entity.n : '';
            const entType = typeof entity.tp === 'string' ? entity.tp : 'concept';
            if (!name)
                return null;
            const out = { name, type: entType };
            if (typeof entity.r === 'string' && entity.r.length > 0)
                out.role = entity.r;
            return out;
        })
            .filter((x) => x !== null)
        : undefined;
    return {
        text,
        type: v1Type,
        source: v1Source,
        importance,
        confidence,
        entities,
    };
}
302
/**
 * Execute a pin or unpin operation on a single fact.
 *
 * The subgraph is append-only, so a status change requires writing a new fact
 * with the updated status and tombstoning the old one. The new fact's `sup`
 * field points to the old fact id, forming a cross-device-visible supersession
 * chain. Matches MCP's `executePinOperation` byte-for-byte on the supersession
 * semantics (short keys, idempotent no-op, decayScore=1.0, trapdoor regen).
 *
 * @param {string} factId - Id of the on-chain fact to pin/unpin.
 * @param {string} targetStatus - Desired human status; 'pinned' pins, any
 *   other value is treated as an unpin (see step 4).
 * @param {object} deps - Injected effects: fetchFactById, decryptBlob,
 *   encryptBlob, generateIndices, submitBatch, owner, sourceAgent, logger?.
 * @param {string} [reason] - Optional caller-supplied reason, echoed back in
 *   the result object.
 * @returns {Promise<object>} Result `{success, fact_id, ...}`. Expected
 *   failures (missing fact, decrypt/encrypt/build/submit errors) are reported
 *   via `success: false` + `error` rather than thrown; a rejection from
 *   `deps.fetchFactById` itself still propagates.
 */
export async function executePinOperation(factId, targetStatus, deps, reason) {
    // 1. Fetch the existing fact
    const existing = await deps.fetchFactById(factId);
    if (!existing) {
        return {
            success: false,
            fact_id: factId,
            error: `Fact not found: ${factId}`,
        };
    }
    // 2. Decrypt + parse current status (strip optional 0x prefix first)
    const blobHex = existing.encryptedBlob.startsWith('0x')
        ? existing.encryptedBlob.slice(2)
        : existing.encryptedBlob;
    let plaintext;
    let recoveredFromDecisionLog = false;
    try {
        plaintext = deps.decryptBlob(blobHex);
    }
    catch (err) {
        // Phase 2.1 recovery path: if the on-chain blob is a tombstone (1-byte
        // `0x00` written by an auto-resolved supersede), the cipher will fail
        // because the ciphertext is shorter than the auth tag. Fall back to the
        // canonical Claim JSON we stashed in `decisions.jsonl` at supersede time.
        // Without this fallback, the user can never override an auto-resolution
        // and the weight-tuning loop never receives gradient signal.
        const errMsg = err instanceof Error ? err.message : String(err);
        // Heuristic tombstone detection: empty/1-byte hex, or cipher errors
        // whose messages indicate a too-short ciphertext.
        const looksLikeTombstone = blobHex === '00' ||
            blobHex === '' ||
            errMsg.includes('Encrypted data too short') ||
            errMsg.includes('too short') ||
            errMsg.includes('Cipher');
        if (!looksLikeTombstone) {
            // Genuine decryption failure — surface it; do NOT attempt recovery.
            return {
                success: false,
                fact_id: factId,
                error: `Failed to decrypt fact: ${errMsg}`,
            };
        }
        const recovered = findLoserClaimInDecisionLog(factId);
        if (!recovered) {
            return {
                success: false,
                fact_id: factId,
                error: `Failed to decrypt fact and no recovery row in decisions.jsonl: ${errMsg}. ` +
                    'The fact may have been tombstoned by an auto-resolution that predates Phase 2.1 ' +
                    '(when loser_claim_json was added to the decision log).',
            };
        }
        plaintext = recovered;
        recoveredFromDecisionLog = true;
        deps.logger?.info?.(`pin: recovered loser claim from decisions.jsonl for ${factId.slice(0, 10)}…`);
    }
    const parsed = parseBlobForPin(plaintext);
    // Recovered claims always represent a fact the user is trying to override —
    // never short-circuit the operation as idempotent because the `st` field on
    // the recovered loser was whatever the original auto-resolution stored
    // (typically active). Drop the previous status so the targetStatus check
    // below produces a real on-chain write.
    if (recoveredFromDecisionLog) {
        parsed.currentStatus = 'active';
    }
    // 3. Idempotent early-exit: nothing to write if the status already matches.
    if (parsed.currentStatus === targetStatus) {
        return {
            success: true,
            fact_id: factId,
            previous_status: parsed.currentStatus,
            new_status: targetStatus,
            idempotent: true,
            reason,
        };
    }
    // 4. Build the new canonical v1.1 claim with pin_status + superseded_by link.
    //
    // The new blob is ALWAYS v1.1 shaped (schema_version "1.0", pin_status
    // present) regardless of the source blob's format. v0 sources are upgraded
    // to v1 on the pin path; v1 sources round-trip their metadata (source,
    // scope, reasoning, entities, volatility) into the new blob.
    const pinStatus = targetStatus === 'pinned' ? 'pinned' : 'unpinned';
    const newFactId = crypto.randomUUID();
    // Project the source blob into v1 shape. For v0 sources we upgrade on the
    // fly: short-key `c` → v1 type, `sa` → source (heuristic), etc.
    const v1View = projectToV1(parsed.source, deps.sourceAgent);
    let canonicalJson;
    try {
        canonicalJson = buildV1ClaimBlob({
            id: newFactId,
            text: v1View.text,
            type: v1View.type,
            source: v1View.source,
            scope: v1View.scope,
            volatility: v1View.volatility,
            reasoning: v1View.reasoning,
            entities: v1View.entities,
            importance: v1View.importance,
            confidence: v1View.confidence,
            createdAt: new Date().toISOString(),
            supersededBy: factId,
            pinStatus,
        });
    }
    catch (err) {
        return {
            success: false,
            fact_id: factId,
            error: `Failed to build v1 claim blob: ${err instanceof Error ? err.message : String(err)}`,
        };
    }
    // 5. Encrypt the new blob
    let newBlobHex;
    try {
        newBlobHex = deps.encryptBlob(canonicalJson);
    }
    catch (err) {
        return {
            success: false,
            fact_id: factId,
            error: `Failed to encrypt updated claim: ${err instanceof Error ? err.message : String(err)}`,
        };
    }
    // 5b. Regenerate trapdoors so the new fact is findable by the same text.
    const entityNames = v1View.entities
        ? v1View.entities.map((e) => e.name).filter((n) => typeof n === 'string' && n.length > 0)
        : [];
    let regenerated;
    try {
        regenerated = await deps.generateIndices(v1View.text, entityNames);
    }
    catch {
        // Index generation is best-effort: an unindexed fact is still readable.
        regenerated = { blindIndices: [] };
    }
    // 6. Build tombstone + new protobuf payloads.
    //
    // Tombstone: empty blob ('00'), empty indices, decayScore=0, source='tombstone'.
    // Written at the DEFAULT protobuf version (legacy v3) because tombstone rows
    // carry no inner blob — the version field is irrelevant for readers and
    // writing v3 keeps round-trip compat with any pre-v1 tombstone parser.
    const tombstonePayload = {
        id: factId,
        timestamp: new Date().toISOString(),
        owner: deps.owner,
        encryptedBlob: '00',
        blindIndices: [],
        decayScore: 0,
        source: 'tombstone',
        contentFp: '',
        agentId: deps.sourceAgent,
    };
    const newPayload = {
        id: newFactId,
        timestamp: new Date().toISOString(),
        owner: deps.owner,
        encryptedBlob: newBlobHex,
        blindIndices: regenerated.blindIndices,
        decayScore: 1.0,
        source: targetStatus === 'pinned' ? 'openclaw-plugin-pin' : 'openclaw-plugin-unpin',
        contentFp: '',
        agentId: deps.sourceAgent,
        encryptedEmbedding: regenerated.encryptedEmbedding,
    };
    // Outer protobuf version: v=4 for the new v1 claim, default (legacy v3)
    // for the tombstone. This is the core of the bug-2 fix — previously both
    // payloads went out at version=3 and the inner blob was v0 short-key.
    const payloads = [
        encodeFactProtobufLocal(tombstonePayload, /* version = legacy v3 */ 3),
        encodeFactProtobufLocal(newPayload, PROTOBUF_VERSION_V4),
    ];
    // 6b. Slice 2f: consult decisions.jsonl to see if this pin/unpin contradicts
    // a prior auto-resolution. If so, append a counterexample to feedback.jsonl
    // so the next digest-compile's tuning loop can nudge the weights. Voluntary
    // pins (no matching decision) produce no feedback row. Never fatal.
    const feedbackLogger = deps.logger ?? {
        info: () => { },
        warn: () => { },
    };
    try {
        await maybeWriteFeedbackForPin(factId, targetStatus, Math.floor(Date.now() / 1000), feedbackLogger);
    }
    catch {
        // Feedback wiring is best-effort — never block the pin op.
    }
    // 7. Submit both in a single batch UserOp.
    try {
        const { txHash, success } = await deps.submitBatch(payloads);
        if (!success) {
            return {
                success: false,
                fact_id: factId,
                previous_status: parsed.currentStatus,
                error: 'On-chain batch submission failed',
                tx_hash: txHash,
            };
        }
        return {
            success: true,
            fact_id: factId,
            new_fact_id: newFactId,
            previous_status: parsed.currentStatus,
            new_status: targetStatus,
            tx_hash: txHash,
            reason,
        };
    }
    catch (err) {
        return {
            success: false,
            fact_id: factId,
            previous_status: parsed.currentStatus,
            error: `Failed to submit pin batch: ${err instanceof Error ? err.message : String(err)}`,
        };
    }
}
524
/** Validate the `{fact_id, reason?}` input shape for pin/unpin tool calls. */
export function validatePinArgs(args) {
    // Each failure gets a fresh object so callers may mutate results safely.
    const fail = (error) => ({ ok: false, factId: '', error });
    if (!args || typeof args !== 'object') {
        return fail('Invalid input: fact_id is required');
    }
    const record = args;
    const factId = record.fact_id;
    if (factId === undefined || factId === null) {
        return fail('Invalid input: fact_id is required');
    }
    if (typeof factId !== 'string' || factId.trim().length === 0) {
        return fail('Invalid input: fact_id must be a non-empty string');
    }
    // Non-string reasons are silently dropped rather than rejected.
    const reason = typeof record.reason === 'string' ? record.reason : undefined;
    return { ok: true, factId: factId.trim(), reason, error: '' };
}