@martian-engineering/lossless-claw 0.2.0 → 0.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,11 +1,11 @@
1
1
  # lossless-claw
2
2
 
3
- > ⚠️ **Current requirement:** This plugin currently requires a custom OpenClaw build with [PR #22201](https://github.com/openclaw/openclaw/pull/22201) applied until that PR is merged upstream.
4
-
5
- Lossless Context Management plugin for [OpenClaw](https://github.com/openclaw/openclaw), based on the [LCM paper](https://voltropy.com/LCM). Replaces OpenClaw's built-in sliding-window compaction with a DAG-based summarization system that preserves every message while keeping active context within model token limits.
3
+ Lossless Context Management plugin for [OpenClaw](https://github.com/openclaw/openclaw), based on the [LCM paper](https://papers.voltropy.com/LCM). Replaces OpenClaw's built-in sliding-window compaction with a DAG-based summarization system that preserves every message while keeping active context within model token limits.
6
4
 
7
5
  ## What it does
8
6
 
7
+ Two ways to learn: read the section below, or [check out this super cool animated visualization](https://losslesscontext.ai).
8
+
9
9
  When a conversation grows beyond the model's context window, OpenClaw (just like all of the other agents) normally truncates older messages. LCM instead:
10
10
 
11
11
  1. **Persists every message** in a SQLite database, organized by conversation
@@ -74,17 +74,19 @@ LCM is configured through a combination of plugin config and environment variabl
74
74
 
75
75
  ### Plugin config
76
76
 
77
- Add an `lossless-claw` block under `plugins.config` in your OpenClaw config:
77
+ Add a `lossless-claw` entry under `plugins.entries` in your OpenClaw config:
78
78
 
79
79
  ```json
80
80
  {
81
81
  "plugins": {
82
- "config": {
82
+ "entries": {
83
83
  "lossless-claw": {
84
84
  "enabled": true,
85
- "freshTailCount": 32,
86
- "contextThreshold": 0.75,
87
- "incrementalMaxDepth": 1
85
+ "config": {
86
+ "freshTailCount": 32,
87
+ "contextThreshold": 0.75,
88
+ "incrementalMaxDepth": -1
89
+ }
88
90
  }
89
91
  }
90
92
  }
@@ -102,7 +104,7 @@ Add an `lossless-claw` block under `plugins.config` in your OpenClaw config:
102
104
  | `LCM_LEAF_MIN_FANOUT` | `8` | Minimum raw messages per leaf summary |
103
105
  | `LCM_CONDENSED_MIN_FANOUT` | `4` | Minimum summaries per condensed node |
104
106
  | `LCM_CONDENSED_MIN_FANOUT_HARD` | `2` | Relaxed fanout for forced compaction sweeps |
105
- | `LCM_INCREMENTAL_MAX_DEPTH` | `0` | How deep incremental compaction goes (0 = leaf only) |
107
+ | `LCM_INCREMENTAL_MAX_DEPTH` | `0` | How deep incremental compaction goes (0 = leaf only, -1 = unlimited) |
106
108
  | `LCM_LEAF_CHUNK_TOKENS` | `20000` | Max source tokens per leaf compaction chunk |
107
109
  | `LCM_LEAF_TARGET_TOKENS` | `1200` | Target token count for leaf summaries |
108
110
  | `LCM_CONDENSED_TARGET_TOKENS` | `2000` | Target token count for condensed summaries |
@@ -110,18 +112,18 @@ Add an `lossless-claw` block under `plugins.config` in your OpenClaw config:
110
112
  | `LCM_LARGE_FILE_TOKEN_THRESHOLD` | `25000` | File blocks above this size are intercepted and stored separately |
111
113
  | `LCM_SUMMARY_MODEL` | *(from OpenClaw)* | Model for summarization (e.g. `anthropic/claude-sonnet-4-20250514`) |
112
114
  | `LCM_SUMMARY_PROVIDER` | *(from OpenClaw)* | Provider override for summarization |
113
- | `LCM_INCREMENTAL_MAX_DEPTH` | `0` | Depth limit for incremental condensation after leaf passes |
115
+ | `LCM_INCREMENTAL_MAX_DEPTH` | `0` | Depth limit for incremental condensation after leaf passes (-1 = unlimited) |
114
116
 
115
117
  ### Recommended starting configuration
116
118
 
117
119
  ```
118
120
  LCM_FRESH_TAIL_COUNT=32
119
- LCM_INCREMENTAL_MAX_DEPTH=1
121
+ LCM_INCREMENTAL_MAX_DEPTH=-1
120
122
  LCM_CONTEXT_THRESHOLD=0.75
121
123
  ```
122
124
 
123
125
  - **freshTailCount=32** protects the last 32 messages from compaction, giving the model enough recent context for continuity.
124
- - **incrementalMaxDepth=1** enables automatic condensation of leaf summaries after each compaction pass (without this, only leaf summaries are created and condensation only happens during manual `/compact` or overflow).
126
+ - **incrementalMaxDepth=-1** enables unlimited automatic condensation after each compaction pass — the DAG cascades as deep as needed. Set to `0` (default) for leaf-only, or a positive integer for a specific depth cap.
125
127
  - **contextThreshold=0.75** triggers compaction when context reaches 75% of the model's window, leaving headroom for the model's response.
126
128
 
127
129
  ## How it works
@@ -171,7 +173,7 @@ This gives the model enough information to know what was discussed, when, and ho
171
173
 
172
174
  Compaction runs in two modes:
173
175
 
174
- - **Proactive (after each turn):** If raw messages outside the fresh tail exceed `leafChunkTokens`, a leaf pass runs. If `incrementalMaxDepth > 0`, condensation follows.
176
+ - **Proactive (after each turn):** If raw messages outside the fresh tail exceed `leafChunkTokens`, a leaf pass runs. If `incrementalMaxDepth != 0`, condensation follows (cascading to the configured depth, or unlimited with `-1`).
175
177
  - **Reactive (overflow/manual):** When total context exceeds `contextThreshold × tokenBudget`, a full sweep runs: all eligible leaf chunks are compacted, then condensation proceeds depth-by-depth until stable.
176
178
 
177
179
  ### Depth-aware prompts
@@ -87,7 +87,7 @@ The **condensed pass** merges summaries at the same depth into a higher-level su
87
87
  **Incremental (after each turn):**
88
88
  - Checks if raw tokens outside the fresh tail exceed `leafChunkTokens`
89
89
  - If so, runs one leaf pass
90
- - If `incrementalMaxDepth > 0`, follows with condensation passes up to that depth
90
+ - If `incrementalMaxDepth != 0`, follows with condensation passes up to that depth (`-1` for unlimited)
91
91
  - Best-effort: failures don't break the conversation
92
92
 
93
93
  **Full sweep (manual `/compact` or overflow):**
@@ -26,7 +26,7 @@ Set recommended environment variables:
26
26
 
27
27
  ```bash
28
28
  export LCM_FRESH_TAIL_COUNT=32
29
- export LCM_INCREMENTAL_MAX_DEPTH=1
29
+ export LCM_INCREMENTAL_MAX_DEPTH=-1
30
30
  ```
31
31
 
32
32
  Restart OpenClaw.
@@ -70,8 +70,9 @@ For coding conversations with tool calls (which generate many messages per logic
70
70
  `LCM_INCREMENTAL_MAX_DEPTH` (default `0`) controls whether condensation happens automatically after leaf passes.
71
71
 
72
72
  - **0** — Only leaf summaries are created incrementally. Condensation only happens during manual `/compact` or overflow.
73
- - **1** — After each leaf pass, attempt to condense d0 summaries into d1. Good default for active conversations.
74
- - **2+** — Deeper automatic condensation. Rarely needed; the full sweep handles this during overflow.
73
+ - **1** — After each leaf pass, attempt to condense d0 summaries into d1.
74
+ - **2+** — Deeper automatic condensation up to the specified depth.
75
+ - **-1** — Unlimited depth. Condensation cascades as deep as needed after each leaf pass. Recommended for long-running sessions.
75
76
 
76
77
  ### Summary target tokens
77
78
 
package/index.ts CHANGED
@@ -597,7 +597,7 @@ function readLatestAssistantReply(messages: unknown[]): string | undefined {
597
597
  function createLcmDependencies(api: OpenClawPluginApi): LcmDependencies {
598
598
  const envSnapshot = snapshotPluginEnv();
599
599
  const readEnv: ReadEnvFn = (key) => process.env[key];
600
- const config = resolveLcmConfig();
600
+ const config = resolveLcmConfig(process.env, _parsedPluginConfig);
601
601
 
602
602
  return {
603
603
  config,
@@ -854,6 +854,9 @@ function createLcmDependencies(api: OpenClawPluginApi): LcmDependencies {
854
854
  };
855
855
  }
856
856
 
857
+ /** Cached config from configSchema.parse — available by the time register() runs. */
858
+ let _parsedPluginConfig: Record<string, unknown> | undefined;
859
+
857
860
  const lcmPlugin = {
858
861
  id: "lossless-claw",
859
862
  name: "Lossless Context Management",
@@ -862,17 +865,12 @@ const lcmPlugin = {
862
865
 
863
866
  configSchema: {
864
867
  parse(value: unknown) {
865
- // Merge plugin config with env vars — env vars take precedence for backward compat
866
868
  const raw =
867
869
  value && typeof value === "object" && !Array.isArray(value)
868
870
  ? (value as Record<string, unknown>)
869
871
  : {};
870
- const enabled = typeof raw.enabled === "boolean" ? raw.enabled : undefined;
871
- const config = resolveLcmConfig();
872
- if (enabled !== undefined) {
873
- config.enabled = enabled;
874
- }
875
- return config;
872
+ _parsedPluginConfig = raw;
873
+ return resolveLcmConfig(process.env, raw);
876
874
  },
877
875
  },
878
876
 
@@ -7,7 +7,7 @@
7
7
  },
8
8
  "incrementalMaxDepth": {
9
9
  "label": "Incremental Max Depth",
10
- "help": "How deep incremental compaction goes (0 = leaf only)"
10
+ "help": "How deep incremental compaction goes (0 = leaf only, -1 = unlimited)"
11
11
  },
12
12
  "freshTailCount": {
13
13
  "label": "Fresh Tail Count",
@@ -32,7 +32,7 @@
32
32
  },
33
33
  "incrementalMaxDepth": {
34
34
  "type": "integer",
35
- "minimum": 0
35
+ "minimum": -1
36
36
  },
37
37
  "freshTailCount": {
38
38
  "type": "integer",
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@martian-engineering/lossless-claw",
3
- "version": "0.2.0",
3
+ "version": "0.2.2",
4
4
  "description": "Lossless Context Management plugin for OpenClaw — DAG-based conversation summarization with incremental compaction",
5
5
  "type": "module",
6
6
  "main": "index.ts",
package/src/compaction.ts CHANGED
@@ -795,14 +795,13 @@ export class CompactionEngine {
795
795
  private resolveIncrementalMaxDepth(): number {
796
796
  if (
797
797
  typeof this.config.incrementalMaxDepth === "number" &&
798
- Number.isFinite(this.config.incrementalMaxDepth) &&
799
- this.config.incrementalMaxDepth > 0
798
+ Number.isFinite(this.config.incrementalMaxDepth)
800
799
  ) {
801
- return Math.floor(this.config.incrementalMaxDepth);
800
+ if (this.config.incrementalMaxDepth < 0) return Infinity;
801
+ if (this.config.incrementalMaxDepth > 0) return Math.floor(this.config.incrementalMaxDepth);
802
802
  }
803
803
  return 0;
804
804
  }
805
-
806
805
  private resolveFanoutForDepth(targetDepth: number, hardTrigger: boolean): number {
807
806
  if (hardTrigger) {
808
807
  return this.resolveCondensedMinFanoutHard();
package/src/db/config.ts CHANGED
@@ -26,25 +26,97 @@ export type LcmConfig = {
26
26
  pruneHeartbeatOk: boolean;
27
27
  };
28
28
 
29
- export function resolveLcmConfig(env: NodeJS.ProcessEnv = process.env): LcmConfig {
29
+ /** Safely coerce an unknown value to a finite number, or return undefined. */
30
+ function toNumber(value: unknown): number | undefined {
31
+ if (typeof value === "number" && Number.isFinite(value)) return value;
32
+ if (typeof value === "string") {
33
+ const n = Number(value);
34
+ if (Number.isFinite(n)) return n;
35
+ }
36
+ return undefined;
37
+ }
38
+
39
+ /** Safely coerce an unknown value to a boolean, or return undefined. */
40
+ function toBool(value: unknown): boolean | undefined {
41
+ if (typeof value === "boolean") return value;
42
+ if (value === "true") return true;
43
+ if (value === "false") return false;
44
+ return undefined;
45
+ }
46
+
47
+ /** Safely coerce an unknown value to a trimmed non-empty string, or return undefined. */
48
+ function toStr(value: unknown): string | undefined {
49
+ if (typeof value === "string") {
50
+ const trimmed = value.trim();
51
+ return trimmed.length > 0 ? trimmed : undefined;
52
+ }
53
+ return undefined;
54
+ }
55
+
56
+ /**
57
+ * Resolve LCM configuration with three-tier precedence:
58
+ * 1. Environment variables (highest — backward compat)
59
+ * 2. Plugin config object (from plugins.entries.lossless-claw.config)
60
+ * 3. Hardcoded defaults (lowest)
61
+ */
62
+ export function resolveLcmConfig(
63
+ env: NodeJS.ProcessEnv = process.env,
64
+ pluginConfig?: Record<string, unknown>,
65
+ ): LcmConfig {
66
+ const pc = pluginConfig ?? {};
67
+
30
68
  return {
31
- enabled: env.LCM_ENABLED !== "false",
32
- databasePath: env.LCM_DATABASE_PATH ?? join(homedir(), ".openclaw", "lcm.db"),
33
- contextThreshold: parseFloat(env.LCM_CONTEXT_THRESHOLD ?? "0.75"),
34
- freshTailCount: parseInt(env.LCM_FRESH_TAIL_COUNT ?? "32", 10),
35
- leafMinFanout: parseInt(env.LCM_LEAF_MIN_FANOUT ?? "8", 10),
36
- condensedMinFanout: parseInt(env.LCM_CONDENSED_MIN_FANOUT ?? "4", 10),
37
- condensedMinFanoutHard: parseInt(env.LCM_CONDENSED_MIN_FANOUT_HARD ?? "2", 10),
38
- incrementalMaxDepth: parseInt(env.LCM_INCREMENTAL_MAX_DEPTH ?? "0", 10),
39
- leafChunkTokens: parseInt(env.LCM_LEAF_CHUNK_TOKENS ?? "20000", 10),
40
- leafTargetTokens: parseInt(env.LCM_LEAF_TARGET_TOKENS ?? "1200", 10),
41
- condensedTargetTokens: parseInt(env.LCM_CONDENSED_TARGET_TOKENS ?? "2000", 10),
42
- maxExpandTokens: parseInt(env.LCM_MAX_EXPAND_TOKENS ?? "4000", 10),
43
- largeFileTokenThreshold: parseInt(env.LCM_LARGE_FILE_TOKEN_THRESHOLD ?? "25000", 10),
44
- largeFileSummaryProvider: env.LCM_LARGE_FILE_SUMMARY_PROVIDER?.trim() ?? "",
45
- largeFileSummaryModel: env.LCM_LARGE_FILE_SUMMARY_MODEL?.trim() ?? "",
46
- autocompactDisabled: env.LCM_AUTOCOMPACT_DISABLED === "true",
47
- timezone: env.TZ ?? Intl.DateTimeFormat().resolvedOptions().timeZone,
48
- pruneHeartbeatOk: env.LCM_PRUNE_HEARTBEAT_OK === "true",
69
+ enabled:
70
+ env.LCM_ENABLED !== undefined
71
+ ? env.LCM_ENABLED !== "false"
72
+ : toBool(pc.enabled) ?? true,
73
+ databasePath:
74
+ env.LCM_DATABASE_PATH ?? toStr(pc.databasePath) ?? join(homedir(), ".openclaw", "lcm.db"),
75
+ contextThreshold:
76
+ (env.LCM_CONTEXT_THRESHOLD !== undefined ? parseFloat(env.LCM_CONTEXT_THRESHOLD) : undefined)
77
+ ?? toNumber(pc.contextThreshold) ?? 0.75,
78
+ freshTailCount:
79
+ (env.LCM_FRESH_TAIL_COUNT !== undefined ? parseInt(env.LCM_FRESH_TAIL_COUNT, 10) : undefined)
80
+ ?? toNumber(pc.freshTailCount) ?? 32,
81
+ leafMinFanout:
82
+ (env.LCM_LEAF_MIN_FANOUT !== undefined ? parseInt(env.LCM_LEAF_MIN_FANOUT, 10) : undefined)
83
+ ?? toNumber(pc.leafMinFanout) ?? 8,
84
+ condensedMinFanout:
85
+ (env.LCM_CONDENSED_MIN_FANOUT !== undefined ? parseInt(env.LCM_CONDENSED_MIN_FANOUT, 10) : undefined)
86
+ ?? toNumber(pc.condensedMinFanout) ?? 4,
87
+ condensedMinFanoutHard:
88
+ (env.LCM_CONDENSED_MIN_FANOUT_HARD !== undefined ? parseInt(env.LCM_CONDENSED_MIN_FANOUT_HARD, 10) : undefined)
89
+ ?? toNumber(pc.condensedMinFanoutHard) ?? 2,
90
+ incrementalMaxDepth:
91
+ (env.LCM_INCREMENTAL_MAX_DEPTH !== undefined ? parseInt(env.LCM_INCREMENTAL_MAX_DEPTH, 10) : undefined)
92
+ ?? toNumber(pc.incrementalMaxDepth) ?? 0,
93
+ leafChunkTokens:
94
+ (env.LCM_LEAF_CHUNK_TOKENS !== undefined ? parseInt(env.LCM_LEAF_CHUNK_TOKENS, 10) : undefined)
95
+ ?? toNumber(pc.leafChunkTokens) ?? 20000,
96
+ leafTargetTokens:
97
+ (env.LCM_LEAF_TARGET_TOKENS !== undefined ? parseInt(env.LCM_LEAF_TARGET_TOKENS, 10) : undefined)
98
+ ?? toNumber(pc.leafTargetTokens) ?? 1200,
99
+ condensedTargetTokens:
100
+ (env.LCM_CONDENSED_TARGET_TOKENS !== undefined ? parseInt(env.LCM_CONDENSED_TARGET_TOKENS, 10) : undefined)
101
+ ?? toNumber(pc.condensedTargetTokens) ?? 2000,
102
+ maxExpandTokens:
103
+ (env.LCM_MAX_EXPAND_TOKENS !== undefined ? parseInt(env.LCM_MAX_EXPAND_TOKENS, 10) : undefined)
104
+ ?? toNumber(pc.maxExpandTokens) ?? 4000,
105
+ largeFileTokenThreshold:
106
+ (env.LCM_LARGE_FILE_TOKEN_THRESHOLD !== undefined ? parseInt(env.LCM_LARGE_FILE_TOKEN_THRESHOLD, 10) : undefined)
107
+ ?? toNumber(pc.largeFileTokenThreshold) ?? 25000,
108
+ largeFileSummaryProvider:
109
+ env.LCM_LARGE_FILE_SUMMARY_PROVIDER?.trim() ?? toStr(pc.largeFileSummaryProvider) ?? "",
110
+ largeFileSummaryModel:
111
+ env.LCM_LARGE_FILE_SUMMARY_MODEL?.trim() ?? toStr(pc.largeFileSummaryModel) ?? "",
112
+ autocompactDisabled:
113
+ env.LCM_AUTOCOMPACT_DISABLED !== undefined
114
+ ? env.LCM_AUTOCOMPACT_DISABLED === "true"
115
+ : toBool(pc.autocompactDisabled) ?? false,
116
+ timezone: env.TZ ?? toStr(pc.timezone) ?? Intl.DateTimeFormat().resolvedOptions().timeZone,
117
+ pruneHeartbeatOk:
118
+ env.LCM_PRUNE_HEARTBEAT_OK !== undefined
119
+ ? env.LCM_PRUNE_HEARTBEAT_OK === "true"
120
+ : toBool(pc.pruneHeartbeatOk) ?? false,
49
121
  };
50
122
  }
package/src/summarize.ts CHANGED
@@ -1,4 +1,3 @@
1
- import { resolveLcmConfig } from "./db/config.js";
2
1
  import type { LcmDependencies } from "./types.js";
3
2
 
4
3
  export type LcmSummarizeOptions = {
@@ -675,11 +674,10 @@ export async function createLcmSummarizeFromLegacyParams(params: {
675
674
 
676
675
  const apiKey = params.deps.getApiKey(provider, model);
677
676
 
678
- const runtimeLcmConfig = resolveLcmConfig();
679
677
  const condensedTargetTokens =
680
- Number.isFinite(runtimeLcmConfig.condensedTargetTokens) &&
681
- runtimeLcmConfig.condensedTargetTokens > 0
682
- ? runtimeLcmConfig.condensedTargetTokens
678
+ Number.isFinite(params.deps.config.condensedTargetTokens) &&
679
+ params.deps.config.condensedTargetTokens > 0
680
+ ? params.deps.config.condensedTargetTokens
683
681
  : DEFAULT_CONDENSED_TARGET_TOKENS;
684
682
 
685
683
  return async (