rlm-cli 0.2.0 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +19 -15
- package/dist/interactive.js +44 -3
- package/package.json +2 -2
package/README.md
CHANGED
|
@@ -13,34 +13,38 @@ CLI for **Recursive Language Models** — based on the [RLM paper](https://arxiv
|
|
|
13
13
|
|
|
14
14
|
Instead of dumping a huge context into a single LLM call, RLM lets the model write Python code to process it — slicing, chunking, running sub-queries on pieces, and building up an answer across multiple iterations.
|
|
15
15
|
|
|
16
|
-
##
|
|
16
|
+
## Install
|
|
17
|
+
|
|
18
|
+
```bash
|
|
19
|
+
npm install -g rlm-cli
|
|
20
|
+
```
|
|
21
|
+
|
|
22
|
+
Set your API key:
|
|
23
|
+
|
|
24
|
+
```bash
|
|
25
|
+
export ANTHROPIC_API_KEY=sk-ant-...
|
|
26
|
+
# or
|
|
27
|
+
export OPENAI_API_KEY=sk-...
|
|
28
|
+
```
|
|
29
|
+
|
|
30
|
+
That's it. Run `rlm` and you're in.
|
|
31
|
+
|
|
32
|
+
### From Source
|
|
17
33
|
|
|
18
34
|
```bash
|
|
19
35
|
git clone https://github.com/viplismism/rlm-cli.git
|
|
20
36
|
cd rlm-cli
|
|
21
37
|
npm install
|
|
22
38
|
npm run build
|
|
23
|
-
npm link
|
|
39
|
+
npm link
|
|
24
40
|
```
|
|
25
41
|
|
|
26
|
-
|
|
42
|
+
Create a `.env` file in the project root with your API key:
|
|
27
43
|
|
|
28
44
|
```bash
|
|
29
45
|
cp .env.example .env
|
|
30
46
|
```
|
|
31
47
|
|
|
32
|
-
```bash
|
|
33
|
-
# .env
|
|
34
|
-
ANTHROPIC_API_KEY=sk-ant-...
|
|
35
|
-
# or
|
|
36
|
-
OPENAI_API_KEY=sk-...
|
|
37
|
-
|
|
38
|
-
# Optional: override default model
|
|
39
|
-
# RLM_MODEL=claude-sonnet-4-5-20250929
|
|
40
|
-
```
|
|
41
|
-
|
|
42
|
-
That's it. Run `rlm` and you're in.
|
|
43
|
-
|
|
44
48
|
## Usage
|
|
45
49
|
|
|
46
50
|
### Interactive Terminal
|
package/dist/interactive.js
CHANGED
|
@@ -159,6 +159,10 @@ ${c.bold}Context${c.reset}
|
|
|
159
159
|
${c.yellow}/context${c.reset} Show loaded context info
|
|
160
160
|
${c.yellow}/clear-context${c.reset} Unload context
|
|
161
161
|
|
|
162
|
+
${c.bold}Model${c.reset}
|
|
163
|
+
${c.yellow}/model${c.reset} Show current model & list available
|
|
164
|
+
${c.yellow}/model${c.reset} <id> Switch model for this session
|
|
165
|
+
|
|
162
166
|
${c.bold}Tools${c.reset}
|
|
163
167
|
${c.yellow}/trajectories${c.reset} List saved runs
|
|
164
168
|
|
|
@@ -595,9 +599,13 @@ async function interactive() {
|
|
|
595
599
|
// Validate env
|
|
596
600
|
const hasApiKey = process.env.ANTHROPIC_API_KEY || process.env.OPENAI_API_KEY;
|
|
597
601
|
if (!hasApiKey) {
|
|
598
|
-
|
|
599
|
-
console.log(`
|
|
600
|
-
|
|
602
|
+
printBanner();
|
|
603
|
+
console.log(` ${c.red}No API key found.${c.reset}\n`);
|
|
604
|
+
console.log(` Set one of these environment variables:\n`);
|
|
605
|
+
console.log(` ${c.yellow}export ANTHROPIC_API_KEY=sk-ant-...${c.reset} ${c.dim}# Anthropic (Claude)${c.reset}`);
|
|
606
|
+
console.log(` ${c.yellow}export OPENAI_API_KEY=sk-...${c.reset} ${c.dim}# OpenAI (GPT)${c.reset}\n`);
|
|
607
|
+
console.log(` ${c.dim}Add to your shell profile (~/.zshrc or ~/.bashrc) to persist across sessions.${c.reset}\n`);
|
|
608
|
+
process.exit(0);
|
|
601
609
|
}
|
|
602
610
|
// Resolve model
|
|
603
611
|
currentModel = resolveModel(currentModelId);
|
|
@@ -675,6 +683,39 @@ async function interactive() {
|
|
|
675
683
|
contextSource = "";
|
|
676
684
|
console.log(` ${c.green}✓${c.reset} Context cleared.`);
|
|
677
685
|
break;
|
|
686
|
+
case "model":
|
|
687
|
+
case "m":
|
|
688
|
+
if (arg) {
|
|
689
|
+
const newModel = resolveModel(arg);
|
|
690
|
+
if (newModel) {
|
|
691
|
+
currentModelId = arg;
|
|
692
|
+
currentModel = newModel;
|
|
693
|
+
console.log(` ${c.green}✓${c.reset} Switched to ${c.bold}${currentModelId}${c.reset}`);
|
|
694
|
+
console.log();
|
|
695
|
+
printStatusLine();
|
|
696
|
+
}
|
|
697
|
+
else {
|
|
698
|
+
console.log(` ${c.red}Model "${arg}" not found.${c.reset} Use ${c.yellow}/model${c.reset} to list available models.`);
|
|
699
|
+
}
|
|
700
|
+
}
|
|
701
|
+
else {
|
|
702
|
+
console.log(`\n ${c.bold}Current model:${c.reset} ${c.cyan}${currentModelId}${c.reset}\n`);
|
|
703
|
+
for (const provider of getProviders()) {
|
|
704
|
+
const providerKey = `${provider.toUpperCase().replace(/-/g, "_")}_API_KEY`;
|
|
705
|
+
if (!process.env[providerKey] && provider !== detectProvider())
|
|
706
|
+
continue;
|
|
707
|
+
const models = getModels(provider);
|
|
708
|
+
if (models.length === 0)
|
|
709
|
+
continue;
|
|
710
|
+
console.log(` ${c.bold}${provider}${c.reset}`);
|
|
711
|
+
for (const m of models) {
|
|
712
|
+
const marker = m.id === currentModelId ? `${c.green}● ${c.reset}` : ` `;
|
|
713
|
+
console.log(` ${marker}${c.dim}${m.id}${c.reset}`);
|
|
714
|
+
}
|
|
715
|
+
console.log();
|
|
716
|
+
}
|
|
717
|
+
}
|
|
718
|
+
break;
|
|
678
719
|
case "trajectories":
|
|
679
720
|
case "traj":
|
|
680
721
|
handleTrajectories();
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "rlm-cli",
|
|
3
|
-
"version": "0.2.0",
|
|
3
|
+
"version": "0.2.1",
|
|
4
4
|
"description": "Standalone CLI for Recursive Language Models (RLMs) — implements Algorithm 1 from arXiv:2512.24601",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"bin": {
|
|
@@ -32,7 +32,7 @@
|
|
|
32
32
|
],
|
|
33
33
|
"license": "MIT",
|
|
34
34
|
"dependencies": {
|
|
35
|
-
"@mariozechner/pi-ai": "^0.
|
|
35
|
+
"@mariozechner/pi-ai": "^0.55.1"
|
|
36
36
|
},
|
|
37
37
|
"devDependencies": {
|
|
38
38
|
"esbuild": "^0.27.3",
|