recursive-llm-ts 1.0.3 → 1.0.5

package/README.md CHANGED
@@ -87,6 +87,7 @@ interface RLMConfig {
   // Execution limits
   max_depth?: number; // Maximum recursion depth (default: 5)
   max_iterations?: number; // Maximum REPL iterations per call (default: 30)
+  pythonia_timeout?: number; // Python bridge timeout in ms (default: 100000ms = 100s)
 
   // LiteLLM parameters - pass any additional parameters supported by LiteLLM
   api_version?: string; // API version (e.g., for Azure)
@@ -174,6 +175,24 @@ const rlm = new RLM('openai/your-model', {
 });
 ```
 
+### Long-Running Processes
+
+For large documents or queue-based processing that may take longer than the default 100s timeout:
+
+```typescript
+const rlm = new RLM('gpt-4o-mini', {
+  max_iterations: 50,       // Allow more iterations for complex processing
+  pythonia_timeout: 600000, // 10 minutes timeout for Python bridge
+  timeout: 300              // 5 minutes timeout for LLM API calls
+});
+
+// Process very large document
+const result = await rlm.completion(
+  'Summarize all key points from this document',
+  veryLargeDocument
+);
+```
+
 ### Other Providers
 
 See the [LiteLLM documentation](https://docs.litellm.ai/docs/providers) for the complete list of supported providers and their configuration.
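A note on the README addition above: the two timeout options use different units. The sketch below follows the README's own example style (no import shown) and only restates what the comments above imply; the suggestion to keep the bridge timeout longer than the LLM timeout is an inference from the example, not a documented requirement.

```typescript
// Sketch only: units of the two timeout knobs, as inferred from the README comments above.
const rlm = new RLM('gpt-4o-mini', {
  pythonia_timeout: 600000, // Node <-> Python bridge timeout, in milliseconds (10 min)
  timeout: 300              // LLM API call timeout passed through to LiteLLM, in seconds (5 min)
});
// Keeping pythonia_timeout (ms) comfortably longer than timeout (s) avoids the bridge
// giving up while the underlying LLM call is still within its own limit.
```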
@@ -16,6 +16,7 @@ export interface RLMConfig {
     api_key?: string;
     max_depth?: number;
     max_iterations?: number;
+    pythonia_timeout?: number;
     [key: string]: any;
 }
 export declare class RLMBridge {
@@ -41,6 +41,17 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
         step((generator = generator.apply(thisArg, _arguments || [])).next());
     });
 };
+var __rest = (this && this.__rest) || function (s, e) {
+    var t = {};
+    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
+        t[p] = s[p];
+    if (s != null && typeof Object.getOwnPropertySymbols === "function")
+        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
+            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
+                t[p[i]] = s[p[i]];
+        }
+    return t;
+};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.RLMBridge = void 0;
 const pythonia_1 = require("pythonia");
@@ -67,11 +78,15 @@ class RLMBridge {
         return __awaiter(this, arguments, void 0, function* (model, query, context, rlmConfig = {}) {
             yield this.ensureRLMModule();
             try {
-                // Create RLM instance with config
+                // Extract pythonia timeout (default: 100000ms)
+                const pythoniaTimeout = rlmConfig.pythonia_timeout || 100000;
+                // Remove pythonia_timeout from config passed to Python
+                const { pythonia_timeout } = rlmConfig, pythonConfig = __rest(rlmConfig, ["pythonia_timeout"]);
+                // Create RLM instance with config, passing timeout to pythonia
                 const RLMClass = yield this.rlmModule.RLM;
-                const rlmInstance = yield RLMClass(model, rlmConfig);
-                // Call completion method
-                const result = yield rlmInstance.completion(query, context);
+                const rlmInstance = yield RLMClass(model, Object.assign(Object.assign({}, pythonConfig), { $timeout: pythoniaTimeout }));
+                // Call completion method with timeout
+                const result = yield rlmInstance.completion(query, context, { $timeout: pythoniaTimeout });
                 const stats = yield rlmInstance.stats;
                 // Convert Python stats dict to JS object
                 const statsObj = {
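For readers who do not want to decode the compiled `__rest`/`Object.assign` output above, here is a rough source-level equivalent in TypeScript. This is a sketch, not the package's actual source: `callRLM`, `rlmModule`, and the local `RLMConfig` interface are placeholders, while the `$timeout` per-call option is taken verbatim from the compiled code above.

```typescript
// Sketch of what the compiled bridge change corresponds to in source form.
// `callRLM` and `rlmModule` are illustrative placeholders, not package exports.
interface RLMConfig {
  max_depth?: number;
  max_iterations?: number;
  pythonia_timeout?: number;
  [key: string]: any;
}

const DEFAULT_PYTHONIA_TIMEOUT_MS = 100000;

async function callRLM(rlmModule: any, model: string, query: string, context: string, rlmConfig: RLMConfig = {}) {
  // Pull the bridge-only option out so it is not forwarded to the Python RLM constructor.
  const { pythonia_timeout, ...pythonConfig } = rlmConfig;
  const timeout = pythonia_timeout || DEFAULT_PYTHONIA_TIMEOUT_MS;

  const RLMClass = await rlmModule.RLM;
  // pythonia reads `$timeout` (ms) as a per-call bridge option rather than a Python kwarg.
  const rlmInstance = await RLMClass(model, { ...pythonConfig, $timeout: timeout });
  return await rlmInstance.completion(query, context, { $timeout: timeout });
}
```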
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "recursive-llm-ts",
-  "version": "1.0.3",
+  "version": "1.0.5",
   "description": "TypeScript bridge for recursive-llm: Recursive Language Models for unbounded context processing",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -8,7 +8,9 @@
     "dist",
     "recursive-llm/src",
     "recursive-llm/pyproject.toml",
-    "scripts/install-python-deps.js"
+    "scripts/install-python-deps.js",
+    "scripts/apply-patches.js",
+    "patches/core.py.patch"
   ],
   "scripts": {
     "build": "tsc",
package/patches/core.py.patch ADDED
@@ -0,0 +1,40 @@
+--- a/src/rlm/core.py
++++ b/src/rlm/core.py
+@@ -54,14 +54,31 @@ class RLM:
+             _current_depth: Internal current depth tracker
+             **llm_kwargs: Additional LiteLLM parameters
+         """
+-        self.model = model
+-        self.recursive_model = recursive_model or model
+-        self.api_base = api_base
+-        self.api_key = api_key
+-        self.max_depth = max_depth
+-        self.max_iterations = max_iterations
++        # Patch for recursive-llm-ts bug where config is passed as 2nd positional arg
++        if isinstance(recursive_model, dict):
++            config = recursive_model
++            # Reset recursive_model default
++            self.recursive_model = config.get('recursive_model', model)
++            self.api_base = config.get('api_base', api_base)
++            self.api_key = config.get('api_key', api_key)
++            self.max_depth = int(config.get('max_depth', max_depth))
++            self.max_iterations = int(config.get('max_iterations', max_iterations))
++
++            # Extract other llm kwargs
++            excluded = {'recursive_model', 'api_base', 'api_key', 'max_depth', 'max_iterations'}
++            self.llm_kwargs = {k: v for k, v in config.items() if k not in excluded}
++            # Merge with any actual kwargs passed
++            self.llm_kwargs.update(llm_kwargs)
++        else:
++            self.recursive_model = recursive_model or model
++            self.api_base = api_base
++            self.api_key = api_key
++            self.max_depth = max_depth
++            self.max_iterations = max_iterations
++            self.llm_kwargs = llm_kwargs
++
+         self._current_depth = _current_depth
+-        self.llm_kwargs = llm_kwargs
++        self.model = model
+ 
+         self.repl = REPLExecutor()
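The reason the patch above exists: the bridge invokes the Python `RLM` constructor with the config object as the second positional argument, so on the Python side it lands in the `recursive_model` parameter. A hypothetical TypeScript illustration follows; `loadRLMModule` and `createInstance` are invented for the example and are not part of the package.

```typescript
// Hypothetical illustration of the positional-argument behaviour the patch handles.
// `loadRLMModule` stands in for however the bridge obtains the Python module via pythonia.
async function createInstance(loadRLMModule: () => Promise<any>) {
  const rlmModule = await loadRLMModule();
  const RLMClass = await rlmModule.RLM;

  const config = { max_depth: 3, max_iterations: 10 };
  // Over the bridge this becomes the Python call RLM('gpt-4o-mini', {...}): the dict binds
  // to the second positional parameter, `recursive_model`, which the patched constructor
  // detects with `isinstance(recursive_model, dict)` and treats as the config.
  return await RLMClass('gpt-4o-mini', config);
}
```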
package/recursive-llm/src/rlm/core.py CHANGED
@@ -61,8 +61,8 @@ class RLM:
             self.recursive_model = config.get('recursive_model', model)
             self.api_base = config.get('api_base', api_base)
             self.api_key = config.get('api_key', api_key)
-            self.max_depth = config.get('max_depth', max_depth)
-            self.max_iterations = config.get('max_iterations', max_iterations)
+            self.max_depth = int(config.get('max_depth', max_depth))
+            self.max_iterations = int(config.get('max_iterations', max_iterations))
 
             # Extract other llm kwargs
             excluded = {'recursive_model', 'api_base', 'api_key', 'max_depth', 'max_iterations'}
package/scripts/apply-patches.js ADDED
@@ -0,0 +1,88 @@
+#!/usr/bin/env node
+const { execSync } = require('child_process');
+const path = require('path');
+const fs = require('fs');
+
+const corePyPath = path.join(__dirname, '..', 'recursive-llm', 'src', 'rlm', 'core.py');
+const patchPath = path.join(__dirname, '..', 'patches', 'core.py.patch');
+
+// Check if core.py exists
+if (!fs.existsSync(corePyPath)) {
+  console.error('Error: core.py not found at', corePyPath);
+  process.exit(1);
+}
+
+// Check if patch exists
+if (!fs.existsSync(patchPath)) {
+  console.error('Error: patch file not found at', patchPath);
+  process.exit(1);
+}
+
+console.log('Applying patches to recursive-llm Python package...');
+
+// Check if patch is already applied by looking for the patch marker
+const coreContent = fs.readFileSync(corePyPath, 'utf8');
+if (coreContent.includes('# Patch for recursive-llm-ts bug where config is passed as 2nd positional arg')) {
+  console.log('✓ Patch already applied to core.py');
+  process.exit(0);
+}
+
+// Apply the patch
+const pythonPackagePath = path.join(__dirname, '..', 'recursive-llm');
+
+try {
+  // Try to apply patch
+  execSync(`patch -p1 -i "${patchPath}"`, {
+    cwd: pythonPackagePath,
+    stdio: 'inherit'
+  });
+  console.log('✓ Successfully applied core.py patch');
+} catch (error) {
+  console.error('Failed to apply patch using patch command.');
+  console.error('Attempting manual patch application...');
+
+  // Fallback: manually apply the patch by replacing the constructor
+  try {
+    const updatedContent = coreContent.replace(
+      /(\s+)self\.model = model\n(\s+)self\.recursive_model = recursive_model or model\n(\s+)self\.api_base = api_base\n(\s+)self\.api_key = api_key\n(\s+)self\.max_depth = max_depth\n(\s+)self\.max_iterations = max_iterations\n(\s+)self\._current_depth = _current_depth\n(\s+)self\.llm_kwargs = llm_kwargs/,
+      `$1# Patch for recursive-llm-ts bug where config is passed as 2nd positional arg
+$1if isinstance(recursive_model, dict):
+$1    config = recursive_model
+$1    # Reset recursive_model default
+$1    self.recursive_model = config.get('recursive_model', model)
+$1    self.api_base = config.get('api_base', api_base)
+$1    self.api_key = config.get('api_key', api_key)
+$1    self.max_depth = int(config.get('max_depth', max_depth))
+$1    self.max_iterations = int(config.get('max_iterations', max_iterations))
+$1
+$1    # Extract other llm kwargs
+$1    excluded = {'recursive_model', 'api_base', 'api_key', 'max_depth', 'max_iterations'}
+$1    self.llm_kwargs = {k: v for k, v in config.items() if k not in excluded}
+$1    # Merge with any actual kwargs passed
+$1    self.llm_kwargs.update(llm_kwargs)
+$1else:
+$1    self.recursive_model = recursive_model or model
+$1    self.api_base = api_base
+$1    self.api_key = api_key
+$1    self.max_depth = max_depth
+$1    self.max_iterations = max_iterations
+$1    self.llm_kwargs = llm_kwargs
+
+$1self._current_depth = _current_depth
+$1self.model = model`
+    );
+
+    if (updatedContent === coreContent) {
+      console.error('Manual patch failed: pattern not found in core.py');
+      console.error('The file may have been updated upstream.');
+      console.error('Please manually apply the patch from patches/core.py.patch');
+      process.exit(1);
+    }
+
+    fs.writeFileSync(corePyPath, updatedContent, 'utf8');
+    console.log('✓ Successfully applied patch manually');
+  } catch (manualError) {
+    console.error('Manual patch application failed:', manualError.message);
+    process.exit(1);
+  }
+}
package/scripts/install-python-deps.js CHANGED
@@ -12,6 +12,14 @@ if (!fs.existsSync(pyprojectPath)) {
   process.exit(1);
 }
 
+// Apply patches first
+console.log('Applying patches to Python package...');
+try {
+  execSync('node "' + path.join(__dirname, 'apply-patches.js') + '"', { stdio: 'inherit' });
+} catch (error) {
+  console.error('Warning: Failed to apply patches. Continuing with installation...');
+}
+
 console.log('Installing Python dependencies for recursive-llm...');
 
 try {