@joshuaswarren/openclaw-engram 9.0.7 → 9.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -469,6 +469,7 @@ function parseConfig(raw) {
469
469
  graphExpansionBlendMin: typeof cfg.graphExpansionBlendMin === "number" ? Math.min(1, Math.max(0, cfg.graphExpansionBlendMin)) : 0.05,
470
470
  graphExpansionBlendMax: typeof cfg.graphExpansionBlendMax === "number" ? Math.min(1, Math.max(0, cfg.graphExpansionBlendMax)) : 0.95,
471
471
  maxEntityGraphEdgesPerMemory: typeof cfg.maxEntityGraphEdgesPerMemory === "number" ? Math.max(0, cfg.maxEntityGraphEdgesPerMemory) : 10,
472
+ delinearizeEnabled: cfg.delinearizeEnabled !== false,
472
473
  recallConfidenceGateEnabled: cfg.recallConfidenceGateEnabled === true,
473
474
  recallConfidenceGateThreshold: typeof cfg.recallConfidenceGateThreshold === "number" ? Math.max(0, Math.min(1, cfg.recallConfidenceGateThreshold)) : 0.12,
474
475
  graphLateralInhibitionEnabled: cfg.graphLateralInhibitionEnabled !== false,
@@ -803,6 +804,133 @@ function chunkContent(content, config = DEFAULT_CHUNKING_CONFIG) {
803
804
  // src/extraction.ts
804
805
  import OpenAI from "openai";
805
806
 
807
// src/delinearize.ts
var PRONOUN_MAP = {
  "he": { types: ["person"], possessive: false, group: "masc" },
  "him": { types: ["person"], possessive: false, group: "masc" },
  "his": { types: ["person"], possessive: true, group: "masc" },
  "she": { types: ["person"], possessive: false, group: "fem" },
  // "her" omitted — ambiguous between object ("saw her") and possessive ("her stack")
  "they": { types: ["company", "project", "other"], possessive: false, group: "they" },
  "them": { types: ["company", "project", "other"], possessive: false, group: "they" },
  "their": { types: ["company", "project", "other"], possessive: true, group: "they" },
  "it": { types: ["project", "tool", "company"], possessive: false, group: "it" },
  "its": { types: ["project", "tool", "company"], possessive: true, group: "it" }
};
/**
 * Replace third-person pronouns in `fact` with the one extracted entity each
 * can refer to, so the fact stands alone outside its conversation.
 *
 * A pronoun is rewritten only when its type filter selects exactly one entity
 * AND no conflicting pronoun group (e.g. "he" vs "she") resolved to the lone
 * entity of the same type. Anything ambiguous is left untouched.
 *
 * @param fact     - extracted fact text to rewrite
 * @param entities - extracted entities, each with `name` and `type`
 * @returns the fact with unambiguous pronouns replaced by entity names
 */
function resolveCoReferences(fact, entities) {
  if (entities.length === 0) return fact;
  // Pass 1: collect pronouns present in the fact whose candidate-type filter
  // matches exactly one entity. The lookahead skips contractions like "he's".
  const hits = [];
  const groupsByType = new Map();
  for (const [pronoun, info] of Object.entries(PRONOUN_MAP)) {
    const probe = new RegExp(`\\b${pronoun}\\b(?![''\u2019])`, "gi");
    if (!probe.test(fact)) continue;
    const candidates = entities.filter((e) => info.types.includes(e.type));
    if (candidates.length !== 1) continue;
    const [entity] = candidates;
    hits.push({ pronoun, info, entity });
    let groups = groupsByType.get(entity.type);
    if (!groups) {
      groups = new Set();
      groupsByType.set(entity.type, groups);
    }
    groups.add(info.group);
  }
  // Pass 2: if pronouns from more than one group all landed on the single
  // entity of some type, that type is ambiguous and must not be rewritten.
  const ambiguousTypes = new Set();
  for (const [type, groups] of groupsByType) {
    const ofType = entities.filter((e) => e.type === type);
    if (ofType.length === 1 && groups.size > 1) ambiguousTypes.add(type);
  }
  const safe = hits.filter((h) => !h.info.types.some((t) => ambiguousTypes.has(t)));
  if (safe.length === 0) return fact;
  // Pass 3: substitute through NUL-delimited placeholders so an entity name
  // that itself contains a pronoun cannot be re-matched by a later iteration.
  let result = fact;
  const substitutions = [];
  for (const { pronoun, info, entity } of safe) {
    const token = `\0ENTITY_${substitutions.length}\0`;
    // "$" in a name would otherwise be a special replacement pattern below.
    const escapedName = entity.name.replace(/\$/g, "$$$$");
    substitutions.push(info.possessive ? `${escapedName}'s` : escapedName);
    result = result.replace(new RegExp(`\\b${pronoun}\\b(?![''\u2019])`, "gi"), token);
  }
  substitutions.forEach((replacement, i) => {
    result = result.replaceAll(`\0ENTITY_${i}\0`, replacement);
  });
  return result;
}
862
/** Render a Date as its UTC calendar day, e.g. "2024-03-15". */
function formatDate(d) {
  return d.toISOString().slice(0, 10);
}
// Each entry rewrites one relative time phrase into an absolute UTC date
// anchored at the message timestamp. Longer phrases come first so that e.g.
// "earlier today" is consumed before the bare "today" pattern can see it.
var TEMPORAL_PATTERNS = [
  {
    pattern: /\bearlier today\b/gi,
    replace: (now) => `earlier on ${formatDate(now)}`
  },
  {
    pattern: /\bthis morning\b/gi,
    replace: (now) => `on the morning of ${formatDate(now)}`
  },
  {
    pattern: /\bthis afternoon\b/gi,
    replace: (now) => `on the afternoon of ${formatDate(now)}`
  },
  {
    pattern: /\bthis evening\b/gi,
    replace: (now) => `on the evening of ${formatDate(now)}`
  },
  {
    pattern: /\blast week\b/gi,
    replace: (now) => {
      const anchor = new Date(now);
      anchor.setUTCDate(anchor.getUTCDate() - 7);
      return `around ${formatDate(anchor)}`;
    }
  },
  {
    pattern: /\blast month\b/gi,
    replace: (now) => {
      const anchor = new Date(now);
      // Clamp to the 1st before stepping back a month so a day-of-month
      // overflow (e.g. Mar 31 -> "Feb 31") cannot skip into the wrong month.
      anchor.setUTCDate(1);
      anchor.setUTCMonth(anchor.getUTCMonth() - 1);
      return `around ${formatDate(anchor)}`;
    }
  },
  // Simple patterns last
  {
    pattern: /\byesterday\b/gi,
    replace: (now) => {
      const anchor = new Date(now);
      anchor.setUTCDate(anchor.getUTCDate() - 1);
      return `on ${formatDate(anchor)}`;
    }
  },
  {
    pattern: /\btoday\b/gi,
    replace: (now) => `on ${formatDate(now)}`
  },
  {
    pattern: /\btomorrow\b/gi,
    replace: (now) => {
      const anchor = new Date(now);
      anchor.setUTCDate(anchor.getUTCDate() + 1);
      return `on ${formatDate(anchor)}`;
    }
  }
];
/**
 * Rewrite relative temporal phrases in `fact` ("yesterday", "last week", …)
 * into absolute dates computed from `now`, the message timestamp.
 *
 * @param fact - extracted fact text
 * @param now  - timestamp the relative phrases are anchored to
 * @returns the fact with every matching phrase replaced
 */
function anchorTemporalExpressions(fact, now) {
  return TEMPORAL_PATTERNS.reduce(
    (text, { pattern, replace }) => text.replace(pattern, replace(now)),
    fact
  );
}
928
/**
 * Make an extracted fact self-contained: anchor relative time expressions to
 * the message timestamp, then swap unambiguous pronouns for entity names.
 *
 * @param factContent - raw fact text from extraction
 * @param entities    - extracted entities used for coreference resolution
 * @param timestamp   - message timestamp used to anchor temporal phrases
 * @returns the delinearized fact text
 */
function delinearize(factContent, entities, timestamp) {
  const anchored = anchorTemporalExpressions(factContent, timestamp);
  return resolveCoReferences(anchored, entities);
}
933
+
806
934
  // src/local-llm.ts
807
935
  import { spawnSync } from "child_process";
808
936
  import { existsSync, readFileSync } from "fs";
@@ -2286,13 +2414,18 @@ var ExtractionEngine = class {
2286
2414
  } catch {
2287
2415
  }
2288
2416
  }
2289
- sanitizeExtractionResult(result) {
2417
+ sanitizeExtractionResult(result, messageTimestamp) {
2418
+ const ts = messageTimestamp ?? /* @__PURE__ */ new Date();
2290
2419
  const facts = result.facts.map((fact) => {
2291
2420
  const sanitized = sanitizeMemoryContent(fact.content);
2292
2421
  if (!sanitized.clean) {
2293
2422
  log.warn(`extraction fact sanitized; violations=${sanitized.violations.join(", ")}`);
2294
2423
  }
2295
- return { ...fact, content: sanitized.text };
2424
+ let content = sanitized.text;
2425
+ if (this.config.delinearizeEnabled) {
2426
+ content = delinearize(content, result.entities, ts);
2427
+ }
2428
+ return { ...fact, content };
2296
2429
  });
2297
2430
  return { ...result, facts };
2298
2431
  }
@@ -2467,6 +2600,8 @@ var ExtractionEngine = class {
2467
2600
  log.debug("extraction skipped \u2014 conversation only contained non-memory work-layer context");
2468
2601
  return { facts: [], profileUpdates: [], entities: [], questions: [] };
2469
2602
  }
2603
+ const lastTurnTs = boundedTurns.length > 0 ? new Date(boundedTurns[boundedTurns.length - 1].timestamp) : void 0;
2604
+ const messageTimestamp = lastTurnTs && !isNaN(lastTurnTs.getTime()) ? lastTurnTs : void 0;
2470
2605
  const traceId = crypto.randomUUID();
2471
2606
  this.emit({ kind: "llm_start", traceId, model: this.config.model, operation: "extraction", input: conversation });
2472
2607
  const startTime = Date.now();
@@ -2477,7 +2612,7 @@ var ExtractionEngine = class {
2477
2612
  const durationMs = Date.now() - startTime;
2478
2613
  this.emit({ kind: "llm_end", traceId, model: this.config.localLlmModel, operation: "extraction", durationMs });
2479
2614
  log.debug(`extraction: used local LLM \u2014 ${localResult.facts.length} facts, ${localResult.entities.length} entities`);
2480
- const sanitized = this.sanitizeExtractionResult(localResult);
2615
+ const sanitized = this.sanitizeExtractionResult(localResult, messageTimestamp);
2481
2616
  return await this.applyProactiveQuestionPass(conversation, sanitized);
2482
2617
  }
2483
2618
  if (!this.config.localLlmFallback) {
@@ -2500,7 +2635,7 @@ var ExtractionEngine = class {
2500
2635
  const durationMs = Date.now() - startTime;
2501
2636
  this.emit({ kind: "llm_end", traceId, model: this.config.model, operation: "extraction", durationMs });
2502
2637
  log.debug(`extraction: used direct client (${this.config.model}) \u2014 ${directResult.facts.length} facts, ${directResult.entities.length} entities`);
2503
- const sanitized = this.sanitizeExtractionResult(directResult);
2638
+ const sanitized = this.sanitizeExtractionResult(directResult, messageTimestamp);
2504
2639
  return await this.applyProactiveQuestionPass(conversation, sanitized);
2505
2640
  }
2506
2641
  log.info("extraction: direct client returned no result, falling back to gateway AI");
@@ -2536,7 +2671,7 @@ var ExtractionEngine = class {
2536
2671
  ...result,
2537
2672
  questions: result.questions ?? [],
2538
2673
  identityReflection: result.identityReflection ?? void 0
2539
- });
2674
+ }, messageTimestamp);
2540
2675
  return await this.applyProactiveQuestionPass(conversation, sanitized);
2541
2676
  }
2542
2677
  log.warn("extraction fallback returned no parsed output");