@lowire/loop 0.0.18 → 0.0.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/cache.js +1 -1
- package/lib/loop.d.ts +8 -5
- package/lib/loop.js +27 -14
- package/lib/providers/anthropic.js +2 -1
- package/lib/providers/google.js +2 -1
- package/lib/providers/openai.js +2 -1
- package/lib/providers/openaiCompatible.js +1 -0
- package/lib/types.d.ts +1 -0
- package/package.json +1 -1
package/lib/cache.js
CHANGED
@@ -36,7 +36,7 @@ async function cachedCompleteNoSecrets(provider, conversation, caches, options)
     if (!process.env.LOWIRE_NO_CACHE && caches.output[key])
         return caches.output[key];
     if (process.env.LOWIRE_FORCE_CACHE)
-        throw new Error('Cache missing but
+        throw new Error('Cache missing but LOWIRE_FORCE_CACHE is set' + JSON.stringify(conversation, null, 2));
     const result = await provider.complete(conversation, options);
     caches.output[key] = result;
     return result;
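
The change above extends the LOWIRE_FORCE_CACHE error to include the full conversation, which makes cache misses easier to diagnose. As a minimal sketch of how these two environment flags are typically combined around a replay cache (the harness below is an assumption for illustration; only the LOWIRE_NO_CACHE and LOWIRE_FORCE_CACHE names come from the diff):

// Hypothetical test harness: record provider outputs once, then replay them
// deterministically. Only the LOWIRE_NO_CACHE and LOWIRE_FORCE_CACHE flags are
// taken from the diff above; the script name and structure are assumptions.
import { spawnSync } from 'node:child_process';

function runSuite(mode: 'record' | 'replay'): void {
    const env = { ...process.env };
    if (mode === 'record')
        env.LOWIRE_NO_CACHE = '1';    // skip cached outputs and call the provider
    else
        env.LOWIRE_FORCE_CACHE = '1'; // any cache miss now throws instead of calling out
    spawnSync('node', ['run-tests.js'], { env, stdio: 'inherit' });
}

runSuite(process.argv[2] === 'record' ? 'record' : 'replay');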
package/lib/loop.d.ts
CHANGED
@@ -20,31 +20,33 @@ export type LoopEvents = {
         conversation: types.Conversation;
         totalUsage: types.Usage;
         budgetTokens?: number;
-    }) => PromiseOrValue<
+    }) => PromiseOrValue<void>;
     onAfterTurn?: (params: {
         assistantMessage: types.AssistantMessage;
         totalUsage: types.Usage;
         budgetTokens?: number;
-    }) => PromiseOrValue<
+    }) => PromiseOrValue<void>;
     onBeforeToolCall?: (params: {
         assistantMessage: types.AssistantMessage;
         toolCall: types.ToolCallContentPart;
-    }) => PromiseOrValue<'
+    }) => PromiseOrValue<'disallow' | void>;
     onAfterToolCall?: (params: {
         assistantMessage: types.AssistantMessage;
         toolCall: types.ToolCallContentPart;
         result: types.ToolResult;
-    }) => PromiseOrValue<'
+    }) => PromiseOrValue<'disallow' | void>;
     onToolCallError?: (params: {
         assistantMessage: types.AssistantMessage;
         toolCall: types.ToolCallContentPart;
         error: Error;
-    }) => PromiseOrValue<
+    }) => PromiseOrValue<void>;
 };
 export type LoopOptions = types.CompletionOptions & LoopEvents & {
     tools?: types.Tool[];
     callTool?: types.ToolCallback;
     maxTurns?: number;
+    maxToolCalls?: number;
+    maxToolCallRetries?: number;
     cache?: types.ReplayCache;
     secrets?: Record<string, string>;
     summarize?: boolean;
@@ -56,6 +58,7 @@ export declare class Loop {
     constructor(options: LoopOptions);
     run(task: string, runOptions?: Omit<LoopOptions, 'model' | 'api' | 'apiKey'> & {
         model?: string;
+        abortController?: AbortController;
     }): Promise<{
         result?: types.ToolResult;
         status: 'ok' | 'break';
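
Taken together, the typing changes pin the hook return types (PromiseOrValue<void>, and 'disallow' | void for the tool-call hooks) and surface three new knobs: maxToolCalls, maxToolCallRetries, and a per-run abortController. A sketch of how a caller might use them, assuming Loop is exported from the package entry point and that the remaining types.CompletionOptions fields (API, key, etc.) are configured elsewhere; the model id, tool name, and task string are placeholders:

// Sketch based on the declarations above. The model id, tool name, and task are
// placeholders; options inherited from types.CompletionOptions are omitted.
import { Loop } from '@lowire/loop';

const abortController = new AbortController();

const loop = new Loop({
    model: 'some-model-id',          // placeholder
    maxTurns: 20,
    maxToolCalls: 50,                // new: hard cap on tool calls per run
    maxToolCallRetries: 3,           // new: budget for consecutive failing tool calls
    onBeforeToolCall: ({ toolCall }) => {
        // 'disallow' is now reflected in the return type and blocks the call.
        if (toolCall.name === 'dangerous_tool')
            return 'disallow';
    },
});

// abortController is new in run(); aborting it ends the run with status 'break'.
const pending = loop.run('Summarize the repository README', { abortController });
setTimeout(() => abortController.abort(), 60_000);
const { status } = await pending;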
package/lib/loop.js
CHANGED
@@ -29,6 +29,7 @@ class Loop {
     }
     async run(task, runOptions = {}) {
         const options = { ...this._loopOptions, ...runOptions };
+        const abortController = runOptions.abortController;
         const allTools = [...(options.tools || []).map(wrapToolWithIsDone)];
         const conversation = {
             systemPrompt,
@@ -38,12 +39,16 @@ class Loop {
             tools: allTools,
         };
         const debug = options.debug;
-
+        const budget = {
+            tokens: options.maxTokens,
+            toolCalls: options.maxToolCalls,
+            toolCallRetries: options.maxToolCallRetries,
+        };
         const totalUsage = { input: 0, output: 0 };
-        debug?.('lowire:loop')(`Starting ${this._provider.name} loop
+        debug?.('lowire:loop')(`Starting ${this._provider.name} loop\n${task}`);
         const maxTurns = options.maxTurns || 100;
         for (let turns = 0; turns < maxTurns; ++turns) {
-            if (options.maxTokens &&
+            if (options.maxTokens && budget.tokens !== undefined && budget.tokens <= 0)
                 throw new Error(`Budget tokens ${options.maxTokens} exhausted`);
             debug?.('lowire:loop')(`Turn ${turns + 1} of (max ${maxTurns})`);
             const caches = options.cache ? {
@@ -51,23 +56,24 @@ class Loop {
                 output: this._cacheOutput,
             } : undefined;
             const summarizedConversation = options.summarize ? this._summarizeConversation(task, conversation, options) : conversation;
-
-            if (
+            await options.onBeforeTurn?.({ conversation: summarizedConversation, totalUsage, budgetTokens: budget.tokens });
+            if (abortController?.signal.aborted)
                 return { status: 'break', usage: totalUsage, turns };
             debug?.('lowire:loop')(`Request`, JSON.stringify({ ...summarizedConversation, tools: `${summarizedConversation.tools.length} tools` }, null, 2));
             const { result: assistantMessage, usage } = await (0, cache_1.cachedComplete)(this._provider, summarizedConversation, caches, {
                 ...options,
-                maxTokens:
+                maxTokens: budget.tokens,
+                signal: abortController?.signal,
             });
             const intent = assistantMessage.content.filter(part => part.type === 'text').map(part => part.text).join('\n');
             totalUsage.input += usage.input;
             totalUsage.output += usage.output;
-            if (
-
+            if (budget.tokens !== undefined)
+                budget.tokens -= usage.input + usage.output;
             debug?.('lowire:loop')('Usage', `input: ${usage.input}, output: ${usage.output}`);
             debug?.('lowire:loop')('Assistant', intent, JSON.stringify(assistantMessage.content, null, 2));
-
-            if (
+            await options.onAfterTurn?.({ assistantMessage, totalUsage, budgetTokens: budget.tokens });
+            if (abortController?.signal.aborted)
                 return { status: 'break', usage: totalUsage, turns };
             conversation.messages.push(assistantMessage);
             const toolCalls = assistantMessage.content.filter(part => part.type === 'tool_call');
@@ -76,10 +82,12 @@ class Loop {
                 continue;
             }
             for (const toolCall of toolCalls) {
+                if (budget.toolCalls !== undefined && --budget.toolCalls < 0)
+                    throw new Error(`Failed to perform step, max tool calls (${options.maxToolCalls}) reached`);
                 const { name, arguments: args } = toolCall;
                 debug?.('lowire:loop')('Call tool', name, JSON.stringify(args, null, 2));
                 const status = await options.onBeforeToolCall?.({ assistantMessage, toolCall });
-                if (
+                if (abortController?.signal.aborted)
                     return { status: 'break', usage: totalUsage, turns };
                 if (status === 'disallow') {
                     toolCall.result = {
@@ -103,7 +111,7 @@ class Loop {
                 const text = result.content.filter(part => part.type === 'text').map(part => part.text).join('\n');
                 debug?.('lowire:loop')('Tool result', text, JSON.stringify(result, null, 2));
                 const status = await options.onAfterToolCall?.({ assistantMessage, toolCall, result });
-                if (
+                if (abortController?.signal.aborted)
                     return { status: 'break', usage: totalUsage, turns };
                 if (status === 'disallow') {
                     toolCall.result = {
@@ -118,8 +126,8 @@ class Loop {
                 }
                 catch (error) {
                     const errorMessage = `Error while executing tool "${name}": ${error instanceof Error ? error.message : String(error)}\n\nPlease try to recover and complete the task.`;
-
-                    if (
+                    await options.onToolCallError?.({ assistantMessage, toolCall, error });
+                    if (abortController?.signal.aborted)
                         return { status: 'break', usage: totalUsage, turns };
                     toolCall.result = {
                         content: [{ type: 'text', text: errorMessage }],
@@ -127,6 +135,11 @@ class Loop {
                 };
             }
         }
+            const hasErrors = toolCalls.some(toolCall => toolCall.result?.isError);
+            if (!hasErrors)
+                budget.toolCallRetries = options.maxToolCallRetries;
+            if (hasErrors && budget.toolCallRetries !== undefined && --budget.toolCallRetries < 0)
+                throw new Error(`Failed to perform action after ${options.maxToolCallRetries} tool call retries`);
         }
         throw new Error('Failed to perform step, max attempts reached');
     }
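
The run loop now keeps a mutable budget ({ tokens, toolCalls, toolCallRetries }), subtracts each turn's usage from the token budget, resets the retry budget after a round of tool calls with no errors, and checks abortController.signal.aborted after every hook so a run can stop cleanly between steps. A sketch of the cooperative-cancellation pattern this enables, aborting from a hook once the remaining budget gets low (the threshold and model id are illustrative assumptions):

// Cooperative cancellation from a hook: once the remaining token budget drops
// below a threshold, abort, and the loop returns { status: 'break' } at its
// next signal check. Threshold and model id are illustrative assumptions.
import { Loop } from '@lowire/loop';

const abortController = new AbortController();

const loop = new Loop({
    model: 'some-model-id',          // placeholder
    maxTokens: 200_000,
    onAfterTurn: ({ totalUsage, budgetTokens }) => {
        // budgetTokens mirrors budget.tokens from the diff above.
        console.log(`spent ${totalUsage.input + totalUsage.output} tokens so far`);
        if (budgetTokens !== undefined && budgetTokens < 5_000)
            abortController.abort(); // break out instead of hitting the budget error
    },
});

const { status } = await loop.run('Refactor the parser', { abortController });
// status === 'break' when the hook aborted; 'ok' when the loop finished normally.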
package/lib/providers/anthropic.js
CHANGED
@@ -52,7 +52,8 @@ async function create(createParams, options) {
     const response = await fetch(options.apiEndpoint ?? `https://api.anthropic.com/v1/messages`, {
         method: 'POST',
         headers,
-        body: JSON.stringify(createParams)
+        body: JSON.stringify(createParams),
+        signal: options.signal,
     });
     if (!response.ok) {
         options.debug?.('lowire:anthropic')('Response:', response.status);
package/lib/providers/google.js
CHANGED
@@ -52,7 +52,8 @@ async function create(model, createParams, options) {
             'Content-Type': 'application/json',
             'x-goog-api-key': options.apiKey,
         },
-        body: JSON.stringify(createParams)
+        body: JSON.stringify(createParams),
+        signal: options.signal,
     });
     if (!response.ok) {
         options.debug?.('lowire:google')('Response:', response.status);
package/lib/providers/openai.js
CHANGED
@@ -72,7 +72,8 @@ async function create(createParams, options) {
     const response = await fetch(options.apiEndpoint ?? `https://api.openai.com/v1/responses`, {
         method: 'POST',
         headers,
-        body: JSON.stringify(createParams)
+        body: JSON.stringify(createParams),
+        signal: options.signal,
     });
     if (!response.ok) {
         options.debug?.('lowire:openai-responses')('Response:', response.status);
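
All three HTTP providers now forward options.signal to fetch, so aborting the run's AbortController cancels an in-flight provider request instead of letting it run to completion. A minimal illustration of the underlying platform behavior (endpoint and payload are placeholders, not lowire APIs):

// Minimal illustration of what forwarding `signal` to fetch buys: the in-flight
// HTTP request is torn down as soon as the controller aborts. Endpoint and
// payload are placeholders.
const controller = new AbortController();

const pending = fetch('https://example.com/v1/messages', {
    method: 'POST',
    body: JSON.stringify({ prompt: 'hello' }),
    signal: controller.signal,
});

setTimeout(() => controller.abort(), 1_000);

try {
    await pending;
} catch (error) {
    // In Node 18+ an aborted fetch rejects with an error named 'AbortError'.
    console.log((error as Error).name);
}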
package/lib/types.d.ts
CHANGED