@superadnim/rlm-pro 1.0.1 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -18
- package/bin/rlm-codebase.js +18 -11
- package/package.json +1 -1
- package/src/executor.js +14 -10
- package/src/python-manager.js +50 -15
package/README.md
CHANGED
@@ -1,17 +1,19 @@
-#
+# RLM PRO

-Analyze
+Analyze **any corpus of unstructured data** using Recursive Language Models - enables LLMs to handle near-infinite context through recursive decomposition.
+
+Works with codebases, document collections, research papers, logs, and any text-based data corpus.

 Based on the [RLM research](https://arxiv.org/abs/2512.24601) from MIT OASYS lab.

 ## Installation

 ```bash
-# Using npx (recommended -
-npx rlm-
+# Using npx (recommended - auto-installs Python package from GitHub)
+npx @superadnim/rlm-pro ./my-project -q "Explain the architecture"

 # Or install globally
-npm install -g rlm-
+npm install -g @superadnim/rlm-pro
 ```

 ### Prerequisites
@@ -24,6 +26,9 @@ npm install -g rlm-codebase
 # Install uv if not already installed
 curl -LsSf https://astral.sh/uv/install.sh | sh

+# Install the Python package (auto-installed on first run, or install manually)
+uv pip install git+https://github.com/CG-Labs/RLM-PRO.git
+
 # Set your API key
 export OPENAI_API_KEY="your-key"
 ```
@@ -34,28 +39,28 @@ export OPENAI_API_KEY="your-key"

 ```bash
 # Basic usage
-npx rlm-
+npx @superadnim/rlm-pro ./my-project -q "Explain the architecture"

 # Get JSON output (for programmatic use)
-npx rlm-
+npx @superadnim/rlm-pro ./my-project -q "List all API endpoints" --json

 # Use a specific model
-npx rlm-
+npx @superadnim/rlm-pro ./my-project -q "Find potential bugs" -m gpt-5.2

 # Use Anthropic backend
-npx rlm-
+npx @superadnim/rlm-pro ./my-project -q "Review this code" -b anthropic

 # Verbose output for debugging
-npx rlm-
+npx @superadnim/rlm-pro ./my-project -q "How does authentication work?" -v

 # Only build context (no LLM call)
-npx rlm-
+npx @superadnim/rlm-pro ./my-project -q "" --context-only
 ```

 ### Programmatic Usage (Node.js)

 ```javascript
-const { analyzeCodebase } = require('rlm-
+const { analyzeCodebase } = require('@superadnim/rlm-pro');

 async function main() {
   const result = await analyzeCodebase('./my-project', {
@@ -110,22 +115,22 @@ This allows answering complex questions about large codebases that would exceed

 ### Architecture Analysis
 ```bash
-npx rlm-
+npx @superadnim/rlm-pro ./backend -q "Describe the system architecture and key design patterns"
 ```

 ### Bug Finding
 ```bash
-npx rlm-
+npx @superadnim/rlm-pro ./src -q "Find potential security vulnerabilities" --json
 ```

 ### Documentation Generation
 ```bash
-npx rlm-
+npx @superadnim/rlm-pro ./api -q "Generate API documentation for all endpoints"
 ```

 ### Code Review
 ```bash
-npx rlm-
+npx @superadnim/rlm-pro ./feature-branch -q "Review this code for best practices"
 ```

 ## License
@@ -135,5 +140,4 @@ MIT
 ## Links

 - [RLM Research Paper](https://arxiv.org/abs/2512.24601)
-- [
-- [GitHub Repository](https://github.com/mit-oasys/rlm)
+- [GitHub Repository](https://github.com/CG-Labs/RLM-PRO)
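The Programmatic Usage example above is cut off mid-call in this diff. A minimal sketch of how a full call to `analyzeCodebase` might look, assuming the option keys mirror the CLI flags (`-q/--query`, `-b/--backend`, `-m/--model`); neither the option names nor the shape of `result` is confirmed by the diff:

```javascript
// Sketch only: option names and the result shape are assumptions inferred
// from the CLI flags documented in the README; they are not shown in this diff.
const { analyzeCodebase } = require('@superadnim/rlm-pro');

async function main() {
  const result = await analyzeCodebase('./my-project', {
    query: 'Explain the architecture', // assumed to mirror -q/--query
    backend: 'openai',                 // assumed to mirror -b/--backend
    model: 'gpt-5.2',                  // assumed to mirror -m/--model
  });
  console.log(result); // exact result shape is not shown in the diff
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
```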
package/bin/rlm-codebase.js
CHANGED
@@ -1,25 +1,27 @@
 #!/usr/bin/env node

 /**
- * RLM
+ * RLM PRO CLI - Node.js wrapper for the Python RLM library
  *
- * Analyzes
- * to handle near-infinite context through recursive decomposition.
+ * Analyzes any corpus of unstructured data using Recursive Language Models,
+ * enabling LLMs to handle near-infinite context through recursive decomposition.
+ *
+ * Works with codebases, documents, research papers, logs, and any text data.
  *
  * Usage:
- *   npx rlm-
- *   npx rlm-
+ *   npx @superadnim/rlm-pro ./my-data -q "Analyse this corpus"
+ *   npx @superadnim/rlm-pro ./docs -q "Summarise key findings" --json
  */

 const { program } = require('commander');
 const { analyzeCodebase, getVersion } = require('../src/index.js');

 program
-  .name('rlm-
-  .description('Analyze
-  .version('
-  .argument('<path>', 'Path to
-  .requiredOption('-q, --query <query>', 'Question or task to perform on the
+  .name('rlm-pro')
+  .description('Analyze any corpus of unstructured data using Recursive Language Models')
+  .version('1.0.3')
+  .argument('<path>', 'Path to directory or file to analyze')
+  .requiredOption('-q, --query <query>', 'Question or task to perform on the data')
   .option('-b, --backend <backend>', 'LLM backend (openai, anthropic, etc.)', 'openai')
   .option('-m, --model <model>', 'Model name to use')
   .option('-e, --env <env>', 'Execution environment (local, docker, modal, prime)', 'local')
@@ -55,4 +57,9 @@ program
   }
 });

-
+// Show help if no arguments provided
+if (process.argv.length <= 2) {
+  program.help();
+} else {
+  program.parse();
+}
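The command's action handler falls between the two hunks above, so only its closing braces appear as context. A hypothetical sketch of what such a handler could look like given the options declared in the first hunk; the mapping of options onto `analyzeCodebase` arguments is an assumption, not the package's actual code:

```javascript
// Hypothetical reconstruction for illustration; the real handler is outside
// the changed hunks and may differ. The option-to-argument mapping is assumed.
program.action(async (targetPath, options) => {
  try {
    const result = await analyzeCodebase(targetPath, {
      query: options.query,     // -q, --query
      backend: options.backend, // -b, --backend
      model: options.model,     // -m, --model
      env: options.env,         // -e, --env
    });
    console.log(typeof result === 'string' ? result : JSON.stringify(result, null, 2));
  } catch (err) {
    console.error(err.message);
    process.exit(1);
  }
});
```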
package/package.json
CHANGED
package/src/executor.js
CHANGED
@@ -45,17 +45,17 @@ async function runRlm(args) {
   let result = null;
   let lastError = null;

-  // Method 1: Try uvx (
-  result = await tryCommand('
+  // Method 1: Try uvx with rlm-pro (if installed via pip from GitHub)
+  result = await tryCommand('rlm-pro', args, execOptions);

-  // Method 2: Try
+  // Method 2: Try rlm-codebase command (alternative entry point)
   if (!result) {
-    result = await tryCommand('
+    result = await tryCommand('rlm-codebase', args, execOptions);
   }

-  // Method 3: Try
+  // Method 3: Try uv run rlm-codebase (for local development with pyproject.toml)
   if (!result) {
-    result = await tryCommand('rlm-codebase', args, execOptions);
+    result = await tryCommand('uv', ['run', 'rlm-codebase', ...args], execOptions);
   }

   // Method 4: Try python -m rlm.cli (fallback)
@@ -72,8 +72,7 @@ async function runRlm(args) {
     throw new Error(
       'Could not run RLM CLI. Please ensure:\n' +
       '1. uv is installed: curl -LsSf https://astral.sh/uv/install.sh | sh\n' +
-      '2. rlm
-      '   Or for local development: cd <rlm-repo> && uv pip install -e .\n' +
+      '2. rlm-pro is installed: uv pip install git+https://github.com/CG-Labs/RLM-PRO.git\n' +
       '3. Restart your terminal after installation'
     );
   }
@@ -107,10 +106,15 @@ async function runRlm(args) {
  */
 async function checkSetup() {
   try {
-    await execFileAsync('
+    await execFileAsync('rlm-pro', ['--version']);
     return true;
   } catch {
-
+    try {
+      await execFileAsync('rlm-codebase', ['--version']);
+      return true;
+    } catch {
+      return false;
+    }
   }
 }
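Each fallback method above goes through the `tryCommand` helper; only its call shape, `tryCommand(cmd, args, execOptions)`, appears in the diff. A minimal sketch of such a helper, assuming it resolves to null when a command is missing or exits non-zero (as the `if (!result)` chain implies):

```javascript
// Sketch under the stated assumption; the package's actual implementation
// of tryCommand is not part of this diff.
const { execFile } = require('child_process');
const { promisify } = require('util');

const execFileAsync = promisify(execFile);

async function tryCommand(cmd, args, execOptions) {
  try {
    // Resolves with { stdout, stderr } when the binary exists and exits 0.
    return await execFileAsync(cmd, args, execOptions);
  } catch {
    // Missing binary or non-zero exit: let the caller fall through to the next method.
    return null;
  }
}
```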
package/src/python-manager.js
CHANGED
@@ -26,15 +26,37 @@ async function checkUv() {
 }

 /**
- * Check if
+ * Check if rlm-pro is installed.
  * @returns {Promise<boolean>}
  */
 async function checkRlmInstalled() {
   try {
-    await execFileAsync('
+    await execFileAsync('rlm-pro', ['--version']);
     return true;
   } catch {
-
+    try {
+      await execFileAsync('rlm-codebase', ['--version']);
+      return true;
+    } catch {
+      return false;
+    }
+  }
+}
+
+/**
+ * Install rlm-pro from GitHub.
+ * @returns {Promise<boolean>}
+ */
+async function installRlmPro() {
+  try {
+    console.log('Installing rlm-pro from GitHub...');
+    await execFileAsync('uv', ['pip', 'install', 'git+https://github.com/CG-Labs/RLM-PRO.git'], {
+      timeout: 120000,
+    });
+    console.log('rlm-pro installed successfully.');
+    return true;
+  } catch (err) {
+    console.error('Failed to install rlm-pro:', err.message);
     return false;
   }
 }
@@ -94,19 +116,31 @@ async function checkPythonSetup() {
 /**
  * Check command - run from postinstall
  */
-function runCheck() {
-
-
-
-
-
-
-
+async function runCheck() {
+  try {
+    const hasUv = await checkUv();
+    if (!hasUv) {
+      console.log('RLM PRO: uv not detected. Install it to use this tool:');
+      console.log('  curl -LsSf https://astral.sh/uv/install.sh | sh');
+      return;
+    }
+
+    console.log('RLM PRO: Python environment ready (uv detected)');
+
+    const hasRlm = await checkRlmInstalled();
+    if (!hasRlm) {
+      console.log('RLM PRO: Installing Python package from GitHub...');
+      const installed = await installRlmPro();
+      if (!installed) {
+        console.log('RLM PRO: Auto-install failed. Install manually:');
+        console.log('  uv pip install git+https://github.com/CG-Labs/RLM-PRO.git');
       }
-  }
-
-
-
+    } else {
+      console.log('RLM PRO: Python package already installed');
+    }
+  } catch (err) {
+    console.error('RLM PRO: Setup check failed:', err.message);
+  }
 }

 // Handle CLI invocation
@@ -125,4 +159,5 @@ module.exports = {
   checkRlmInstalled,
   checkPythonSetup,
   getUvInstallInstructions,
+  installRlmPro,
 };
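The comment `Check command - run from postinstall` and the context line `// Handle CLI invocation` suggest `runCheck()` is triggered by an npm postinstall script, but neither the invocation guard nor the package.json hook appears in this diff. A hypothetical sketch of that wiring:

```javascript
// Assumed wiring, not shown in the diff: package.json would contain something like
//   "scripts": { "postinstall": "node src/python-manager.js check" }
// and the module would guard direct invocation like this:
if (require.main === module) {
  const command = process.argv[2];
  if (command === 'check') {
    runCheck();
  }
}
```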