@alpic80/rivet-core 1.19.1-aidon.2 → 1.24.0-aidon.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -0
- package/dist/cjs/bundle.cjs +4187 -1020
- package/dist/cjs/bundle.cjs.map +4 -4
- package/dist/esm/api/createProcessor.js +8 -17
- package/dist/esm/api/looseDataValue.js +16 -0
- package/dist/esm/exports.js +2 -0
- package/dist/esm/integrations/CodeRunner.js +36 -0
- package/dist/esm/integrations/GptTokenizerTokenizer.js +7 -4
- package/dist/esm/integrations/openai/OpenAIEmbeddingGenerator.js +1 -1
- package/dist/esm/model/DataValue.js +14 -2
- package/dist/esm/model/GraphProcessor.js +275 -104
- package/dist/esm/model/NodeBase.js +11 -1
- package/dist/esm/model/NodeImpl.js +8 -0
- package/dist/esm/model/Nodes.js +31 -4
- package/dist/esm/model/ProjectReferenceLoader.js +1 -0
- package/dist/esm/model/nodes/AssembleMessageNode.js +12 -2
- package/dist/esm/model/nodes/AssemblePromptNode.js +22 -0
- package/dist/esm/model/nodes/CallGraphNode.js +3 -4
- package/dist/esm/model/nodes/ChatLoopNode.js +150 -0
- package/dist/esm/model/nodes/ChatNode.js +7 -934
- package/dist/esm/model/nodes/ChatNodeBase.js +1275 -0
- package/dist/esm/model/nodes/ChunkNode.js +2 -2
- package/dist/esm/model/nodes/CodeNode.js +40 -4
- package/dist/esm/model/nodes/CronNode.js +248 -0
- package/dist/esm/model/nodes/DelegateFunctionCallNode.js +37 -12
- package/dist/esm/model/nodes/DestructureNode.js +1 -1
- package/dist/esm/model/nodes/DocumentNode.js +183 -0
- package/dist/esm/model/nodes/ExtractJsonNode.js +4 -4
- package/dist/esm/model/nodes/ExtractRegexNode.js +10 -11
- package/dist/esm/model/nodes/GetEmbeddingNode.js +1 -1
- package/dist/esm/model/nodes/HttpCallNode.js +3 -1
- package/dist/esm/model/nodes/IfNode.js +5 -0
- package/dist/esm/model/nodes/ImageToMDNode.js +116 -0
- package/dist/esm/model/nodes/LoopUntilNode.js +214 -0
- package/dist/esm/model/nodes/PromptNode.js +29 -6
- package/dist/esm/model/nodes/ReadAllFilesNode.js +210 -0
- package/dist/esm/model/nodes/ReadDirectoryNode.js +31 -25
- package/dist/esm/model/nodes/ReferencedGraphAliasNode.js +199 -0
- package/dist/esm/model/nodes/TextNode.js +9 -4
- package/dist/esm/model/nodes/ToMarkdownTableNode.js +119 -0
- package/dist/esm/model/nodes/ToTreeNode.js +133 -0
- package/dist/esm/model/nodes/{GptFunctionNode.js → ToolNode.js} +10 -10
- package/dist/esm/model/nodes/UserInputNode.js +10 -12
- package/dist/esm/plugins/aidon/nodes/ChatAidonNode.js +3 -3
- package/dist/esm/plugins/anthropic/anthropic.js +29 -10
- package/dist/esm/plugins/anthropic/fetchEventSource.js +3 -2
- package/dist/esm/plugins/anthropic/nodes/ChatAnthropicNode.js +267 -147
- package/dist/esm/plugins/anthropic/plugin.js +9 -1
- package/dist/esm/plugins/gentrace/plugin.js +6 -6
- package/dist/esm/plugins/google/google.js +113 -5
- package/dist/esm/plugins/google/nodes/ChatGoogleNode.js +211 -54
- package/dist/esm/plugins/google/plugin.js +13 -6
- package/dist/esm/plugins/openai/nodes/RunThreadNode.js +2 -2
- package/dist/esm/recording/ExecutionRecorder.js +5 -1
- package/dist/esm/utils/chatMessageToOpenAIChatCompletionMessage.js +15 -2
- package/dist/esm/utils/coerceType.js +1 -1
- package/dist/esm/utils/fetchEventSource.js +1 -1
- package/dist/esm/utils/interpolation.js +108 -3
- package/dist/esm/utils/openai.js +106 -50
- package/dist/esm/utils/paths.js +80 -0
- package/dist/esm/utils/serialization/serialization_v4.js +5 -0
- package/dist/types/api/createProcessor.d.ts +11 -5
- package/dist/types/api/looseDataValue.d.ts +4 -0
- package/dist/types/api/streaming.d.ts +1 -1
- package/dist/types/exports.d.ts +2 -0
- package/dist/types/integrations/CodeRunner.d.ts +18 -0
- package/dist/types/model/DataValue.d.ts +29 -6
- package/dist/types/model/EditorDefinition.d.ts +6 -1
- package/dist/types/model/GraphProcessor.d.ts +14 -7
- package/dist/types/model/NodeBase.d.ts +4 -0
- package/dist/types/model/NodeImpl.d.ts +5 -4
- package/dist/types/model/Nodes.d.ts +13 -4
- package/dist/types/model/ProcessContext.d.ts +16 -1
- package/dist/types/model/Project.d.ts +19 -7
- package/dist/types/model/ProjectReferenceLoader.d.ts +5 -0
- package/dist/types/model/RivetPlugin.d.ts +6 -0
- package/dist/types/model/RivetUIContext.d.ts +5 -1
- package/dist/types/model/Settings.d.ts +1 -0
- package/dist/types/model/nodes/AssemblePromptNode.d.ts +4 -1
- package/dist/types/model/nodes/ChatLoopNode.d.ts +21 -0
- package/dist/types/model/nodes/ChatNode.d.ts +2 -62
- package/dist/types/model/nodes/ChatNodeBase.d.ts +85 -0
- package/dist/types/model/nodes/CodeNode.d.ts +8 -2
- package/dist/types/model/nodes/CronNode.d.ts +34 -0
- package/dist/types/model/nodes/DelegateFunctionCallNode.d.ts +1 -0
- package/dist/types/model/nodes/DocumentNode.d.ts +28 -0
- package/dist/types/model/nodes/ImageToMDNode.d.ts +20 -0
- package/dist/types/model/nodes/LoopUntilNode.d.ts +32 -0
- package/dist/types/model/nodes/PromptNode.d.ts +2 -0
- package/dist/types/model/nodes/ReadAllFilesNode.d.ts +30 -0
- package/dist/types/model/nodes/ReadDirectoryNode.d.ts +1 -1
- package/dist/types/model/nodes/ReferencedGraphAliasNode.d.ts +31 -0
- package/dist/types/model/nodes/ToMarkdownTableNode.d.ts +19 -0
- package/dist/types/model/nodes/ToTreeNode.d.ts +21 -0
- package/dist/types/model/nodes/UserInputNode.d.ts +2 -3
- package/dist/types/plugins/anthropic/anthropic.d.ts +94 -13
- package/dist/types/plugins/anthropic/nodes/ChatAnthropicNode.d.ts +7 -2
- package/dist/types/plugins/google/google.d.ts +93 -18
- package/dist/types/plugins/google/nodes/ChatGoogleNode.d.ts +3 -2
- package/dist/types/recording/RecordedEvents.d.ts +2 -0
- package/dist/types/utils/base64.d.ts +1 -1
- package/dist/types/utils/chatMessageToOpenAIChatCompletionMessage.d.ts +3 -1
- package/dist/types/utils/interpolation.d.ts +3 -0
- package/dist/types/utils/openai.d.ts +127 -21
- package/dist/types/utils/paths.d.ts +8 -0
- package/package.json +15 -11
- /package/dist/types/model/nodes/{GptFunctionNode.d.ts → ToolNode.d.ts} +0 -0
package/dist/esm/plugins/anthropic/nodes/ChatAnthropicNode.js

@@ -1,5 +1,5 @@
- import {
- import { anthropicModelOptions, anthropicModels, streamChatCompletions, AnthropicError, streamMessageApi,
+ import {} from '../../../index.js';
+ import { anthropicModelOptions, anthropicModels, streamChatCompletions, AnthropicError, streamMessageApi, } from '../anthropic.js';
  import { nanoid } from 'nanoid/non-secure';
  import { dedent } from 'ts-dedent';
  import retry from 'p-retry';
@@ -11,6 +11,8 @@ import { pluginNodeDefinition } from '../../../model/NodeDefinition.js';
  import { getScalarTypeOf, isArrayDataValue } from '../../../model/DataValue.js';
  import { assertNever } from '../../../utils/assertNever.js';
  import { isNotNull } from '../../../utils/genericUtilFunctions.js';
+ import { uint8ArrayToBase64 } from '../../../utils/base64.js';
+ import { getInputOrData } from '../../../utils/inputs.js';
  // Temporary
  const cache = new Map();
  export const ChatAnthropicNodeImpl = {
@@ -25,7 +27,7 @@ export const ChatAnthropicNodeImpl = {
  width: 275,
  },
  data: {
- model: 'claude-3-
+ model: 'claude-3-7-sonnet-latest',
  useModelInput: false,
  temperature: 0.5,
  useTemperatureInput: false,
@@ -43,6 +45,11 @@ export const ChatAnthropicNodeImpl = {
  cache: false,
  useAsGraphPartialOutput: true,
  enableToolUse: false,
+ endpoint: '',
+ useEndpointInput: false,
+ overrideModel: undefined,
+ useOverrideModelInput: false,
+ enableCitations: false,
  },
  };
  return chartNode;
@@ -122,12 +129,20 @@ export const ChatAnthropicNodeImpl = {
  id: 'response',
  title: 'Response',
  });
+ if (data.enableCitations) {
+ outputs.push({
+ dataType: 'object[]',
+ id: 'citations',
+ title: 'Citations',
+ description: 'Citations from the response, if any.',
+ });
+ }
  if (data.enableToolUse) {
  outputs.push({
  dataType: 'object[]',
  id: 'function-calls',
- title: '
- description: 'The
+ title: 'Tool Calls',
+ description: 'The tool calls that were made, if any.',
  });
  }
  outputs.push({
@@ -139,7 +154,9 @@ export const ChatAnthropicNodeImpl = {
  return outputs;
  },
  getBody(data) {
- const modelName =
+ const modelName = data.overrideModel
+ ? data.overrideModel
+ : anthropicModels[data.model]?.displayName ?? 'Unknown Model';
  return dedent `
  ${modelName}
  ${data.useTopP
@@ -152,65 +169,105 @@ export const ChatAnthropicNodeImpl = {
  getEditors() {
  return [
  {
- type: '
- label: '
- [old lines 157-203 removed; content not shown in this view]
+ type: 'group',
+ label: 'Parameters',
+ defaultOpen: true,
+ editors: [
+ {
+ type: 'dropdown',
+ label: 'Model',
+ dataKey: 'model',
+ useInputToggleDataKey: 'useModelInput',
+ options: anthropicModelOptions,
+ disableIf: (d) => !!d.overrideModel?.trim(),
+ helperMessage: (d) => (!!d.overrideModel ? `Model is overridden to: ${d.overrideModel}` : ''),
+ },
+ {
+ type: 'number',
+ label: 'Temperature',
+ dataKey: 'temperature',
+ useInputToggleDataKey: 'useTemperatureInput',
+ min: 0,
+ max: 2,
+ step: 0.1,
+ },
+ {
+ type: 'number',
+ label: 'Top P',
+ dataKey: 'top_p',
+ useInputToggleDataKey: 'useTopPInput',
+ min: 0,
+ max: 1,
+ step: 0.1,
+ },
+ {
+ type: 'toggle',
+ label: 'Use Top P',
+ dataKey: 'useTopP',
+ useInputToggleDataKey: 'useUseTopPInput',
+ },
+ {
+ type: 'number',
+ label: 'Max Tokens',
+ dataKey: 'maxTokens',
+ useInputToggleDataKey: 'useMaxTokensInput',
+ min: 0,
+ max: Number.MAX_SAFE_INTEGER,
+ step: 1,
+ },
+ {
+ type: 'string',
+ label: 'Stop',
+ dataKey: 'stop',
+ useInputToggleDataKey: 'useStopInput',
+ },
+ ],
  },
  {
- type: '
- label: '
- [old line 208 removed; content not shown in this view]
+ type: 'group',
+ label: 'Tools',
+ editors: [
+ {
+ type: 'toggle',
+ label: 'Enable Tool Use (disables streaming)',
+ dataKey: 'enableToolUse',
+ },
+ {
+ type: 'toggle',
+ label: 'Enable Citations',
+ dataKey: 'enableCitations',
+ },
+ ],
  },
  {
- type: '
- label: '
- [old line 213 removed; content not shown in this view]
+ type: 'group',
+ label: 'Advanced',
+ editors: [
+ {
+ type: 'toggle',
+ label: 'Cache (same inputs, same outputs)',
+ dataKey: 'cache',
+ },
+ {
+ type: 'toggle',
+ label: 'Use for subgraph partial output',
+ dataKey: 'useAsGraphPartialOutput',
+ },
+ {
+ type: 'string',
+ label: 'Endpoint',
+ dataKey: 'endpoint',
+ useInputToggleDataKey: 'useEndpointInput',
+ helperMessage: 'Overrides the Anthropic API endpoint. Leave blank to use the default configured endpoint in settings, or https://api.anthropic.com/v1 if none is configured.',
+ },
+ {
+ type: 'string',
+ label: 'Override Model',
+ dataKey: 'overrideModel',
+ useInputToggleDataKey: 'useOverrideModelInput',
+ helperMessage: 'Overrides the AI model used for the chat node to this value.',
+ },
+ ],
  },
  ];
  },
@@ -225,14 +282,10 @@ export const ChatAnthropicNodeImpl = {
  };
  },
  async process(data, inputs, context) {
- if (context.executor === 'browser') {
- throw new Error('This node requires using the Node executor');
- }
  const output = {};
- const rawModel = data
- [old lines 233-234 removed; content not shown in this view]
- const model = rawModel;
+ const rawModel = getInputOrData(data, inputs, 'model');
+ const overrideModel = getInputOrData(data, inputs, 'overrideModel');
+ const model = (overrideModel || rawModel);
  const temperature = data.useTemperatureInput
  ? coerceTypeOptional(inputs['temperature'], 'number') ?? data.temperature
  : data.temperature;
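Note: the hunk above swaps the old hard-coded model handling for getInputOrData lookups plus an override. The following standalone TypeScript sketch is illustrative only (not package code); the simplified NodeData/NodeInputs types and the resolveModel helper are assumptions. It shows the resolution order implied by the diff: a connected input wins over node data when its toggle is on, and a non-empty override model wins over the dropdown selection.

    // Illustrative only: simplified stand-ins for the rivet-core types.
    type NodeInputs = Record<string, { type: string; value: unknown } | undefined>;

    interface NodeData {
      model: string;
      useModelInput: boolean;
      overrideModel?: string;
      useOverrideModelInput?: boolean;
    }

    // Simplified getInputOrData: prefer the connected input when its toggle is on.
    function getInputOrData(data: NodeData, inputs: NodeInputs, key: 'model' | 'overrideModel'): string | undefined {
      const useInput = key === 'model' ? data.useModelInput : data.useOverrideModelInput;
      const input = inputs[key];
      if (useInput && typeof input?.value === 'string') {
        return input.value;
      }
      return data[key];
    }

    function resolveModel(data: NodeData, inputs: NodeInputs): string | undefined {
      const rawModel = getInputOrData(data, inputs, 'model');
      const overrideModel = getInputOrData(data, inputs, 'overrideModel');
      return overrideModel || rawModel; // a non-empty override wins
    }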
@@ -268,6 +321,9 @@ export const ChatAnthropicNodeImpl = {
  prompt += '\n\nAssistant:';
  // Get the "System" prompt input for Claude 3 models
  const system = data.model.startsWith('claude-3') ? getSystemPrompt(inputs) : undefined;
+ const systemInput = inputs['system'];
+ const includesCacheBreakpoint = rivetChatMessages.some((m) => m.isCacheBreakpoint) ||
+ (systemInput?.type === 'chat-message' && systemInput.value.isCacheBreakpoint);
  let { maxTokens } = data;
  const tokenizerInfo = {
  node: context.node,
@@ -306,7 +362,7 @@ export const ChatAnthropicNodeImpl = {
  top_p: useTopP ? topP : undefined,
  max_tokens: maxTokens ?? modelInfo.maxTokens,
  stop_sequences: stop ? [stop] : undefined,
- system
+ system,
  messages,
  tools: tools
  ? tools.map((tool) => ({ name: tool.name, description: tool.description, input_schema: tool.parameters }))
@@ -322,77 +378,79 @@ export const ChatAnthropicNodeImpl = {
  }
  const startTime = Date.now();
  const apiKey = context.getPluginConfig('anthropicApiKey');
- [old lines 325-328 removed; content not shown in this view]
- ...messageOptions,
- });
- const { input_tokens: requestTokens, output_tokens: responseTokens } = response.usage;
- const responseText = response.content
- .map((c) => c.text)
- .filter(isNotNull)
- .join('');
- output['response'] = {
- type: 'string',
- value: responseText,
- };
- const functionCalls = response.content
- .filter((content) => content.name && content.id)
- .map((functionCall) => ({
- name: functionCall.name,
- arguments: functionCall.input, // Matches OpenAI ChatNode
- id: functionCall.id,
- }));
- if (functionCalls.length > 0) {
- output['function-calls'] = {
- type: 'object[]',
- value: functionCalls,
- };
- }
- output['all-messages'] = {
- type: 'chat-message[]',
- value: [
- ...rivetChatMessages,
- {
- type: 'assistant',
- message: responseText,
- function_call: functionCalls.length > 0
- ? functionCalls.map((toolCall) => ({
- name: toolCall.name,
- arguments: JSON.stringify(toolCall.arguments),
- id: toolCall.id,
- }))[0]
- : undefined,
- function_calls: functionCalls.map((toolCall) => ({
- name: toolCall.name,
- arguments: JSON.stringify(toolCall.arguments),
- id: toolCall.id,
- })),
- },
- ],
- };
- output['requestTokens'] = { type: 'number', value: requestTokens ?? tokenCountEstimate };
- const responseTokenCount = responseTokens ?? context.tokenizer.getTokenCountForString(responseText, tokenizerInfo);
- output['responseTokens'] = { type: 'number', value: responseTokenCount };
- }
- else if (useMessageApi) {
+ const defaultApiEndpoint = context.getPluginConfig('anthropicApiEndpoint') || 'https://api.anthropic.com/v1';
+ const configuredEndpoint = getInputOrData(data, inputs, 'endpoint');
+ const apiEndpoint = configuredEndpoint?.trim() ? configuredEndpoint : defaultApiEndpoint;
+ if (useMessageApi) {
  // Use the messages API for Claude 3 models
  const chunks = streamMessageApi({
+ apiEndpoint,
  apiKey: apiKey ?? '',
  signal: context.signal,
+ beta: 'prompt-caching-2024-07-31',
  ...messageOptions,
  });
  // Process the response chunks and update the output
  const responseParts = [];
- let requestTokens = undefined
+ let requestTokens = undefined;
+ let responseTokens = undefined;
+ const citations = [];
+ // Track tool calls
+ const toolCalls = [];
+ let currentToolCall = null;
+ let accumulatedJsonString = '';
  for await (const chunk of chunks) {
  let completion = '';
  if (chunk.type === 'content_block_start') {
- [old line 392 removed; content not shown in this view]
+ if (chunk.content_block.type === 'text') {
+ completion = chunk.content_block.text || '';
+ }
+ else if (chunk.content_block.type === 'tool_use') {
+ currentToolCall = {
+ id: chunk.content_block.id,
+ name: chunk.content_block.name,
+ input: chunk.content_block.input || {},
+ };
+ accumulatedJsonString = '';
+ }
  }
  else if (chunk.type === 'content_block_delta') {
- [old line 395 removed; content not shown in this view]
+ if (chunk.delta.type === 'text_delta') {
+ completion = chunk.delta.text;
+ }
+ else if (chunk.delta.type === 'input_json_delta') {
+ if (currentToolCall) {
+ accumulatedJsonString += chunk.delta.partial_json || '';
+ try {
+ // Try to parse the accumulated JSON
+ const parsedJson = JSON.parse(accumulatedJsonString);
+ currentToolCall.input = parsedJson;
+ accumulatedJsonString = '';
+ }
+ catch (e) {
+ // Not valid JSON yet, keep accumulating
+ }
+ }
+ }
+ else if (chunk.delta.type === 'citations_delta') {
+ citations.push(chunk.delta.citation);
+ }
+ }
+ else if (chunk.type === 'content_block_stop') {
+ if (currentToolCall) {
+ if (accumulatedJsonString) {
+ try {
+ const parsedJson = JSON.parse(accumulatedJsonString);
+ currentToolCall.input = parsedJson;
+ }
+ catch (e) {
+ console.warn('Failed to parse tool call JSON input:', accumulatedJsonString);
+ }
+ }
+ toolCalls.push({ ...currentToolCall });
+ currentToolCall = null;
+ accumulatedJsonString = '';
+ }
  }
  else if (chunk.type === 'message_start' && chunk.message?.usage?.input_tokens) {
  requestTokens = chunk.message.usage.input_tokens;
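Note: the streaming hunk above buffers input_json_delta chunks for a tool_use block until the buffered text parses as complete JSON, and finalizes the call on content_block_stop. A self-contained sketch of that accumulation technique follows; it is illustrative only (not package code), and the StreamEvent shape is a trimmed-down assumption of the Anthropic event types used in the diff.

    interface ToolCall { id: string; name: string; input: unknown; }

    type StreamEvent =
      | { type: 'content_block_start'; content_block: { type: 'text'; text: string } | { type: 'tool_use'; id: string; name: string } }
      | { type: 'content_block_delta'; delta: { type: 'text_delta'; text: string } | { type: 'input_json_delta'; partial_json: string } }
      | { type: 'content_block_stop' };

    function collectToolCalls(events: StreamEvent[]): { text: string; toolCalls: ToolCall[] } {
      let text = '';
      const toolCalls: ToolCall[] = [];
      let current: ToolCall | null = null;
      let buffered = '';

      for (const event of events) {
        if (event.type === 'content_block_start') {
          if (event.content_block.type === 'text') {
            text += event.content_block.text;
          } else {
            // A tool_use block opened; start a fresh JSON buffer for its arguments.
            current = { id: event.content_block.id, name: event.content_block.name, input: {} };
            buffered = '';
          }
        } else if (event.type === 'content_block_delta') {
          if (event.delta.type === 'text_delta') {
            text += event.delta.text;
          } else if (current) {
            buffered += event.delta.partial_json;
            try {
              current.input = JSON.parse(buffered); // only succeeds once the JSON is complete
              buffered = '';
            } catch {
              // not valid JSON yet; keep accumulating
            }
          }
        } else if (event.type === 'content_block_stop' && current) {
          toolCalls.push(current);
          current = null;
          buffered = '';
        }
      }
      return { text, toolCalls };
    }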
@@ -400,14 +458,39 @@ export const ChatAnthropicNodeImpl = {
  else if (chunk.type === 'message_delta' && chunk.delta?.usage?.output_tokens) {
  responseTokens = chunk.delta.usage.output_tokens;
  }
- if (
- [old line 404 removed; content not shown in this view]
+ if (completion) {
+ responseParts.push(completion);
  }
- responseParts.push(completion);
  output['response'] = {
  type: 'string',
  value: responseParts.join('').trim(),
  };
+ if (toolCalls.length > 0) {
+ output['function-calls'] = {
+ type: 'object[]',
+ value: toolCalls.map((tool) => ({
+ id: tool.id,
+ name: tool.name,
+ arguments: tool.input,
+ })),
+ };
+ }
+ else {
+ output['function-calls'] = {
+ type: 'control-flow-excluded',
+ value: undefined,
+ };
+ }
+ output['citations'] = {
+ type: 'object[]',
+ value: citations,
+ };
+ // Format function calls for the ChatMessage interface
+ const functionCalls = toolCalls.map((tool) => ({
+ name: tool.name,
+ arguments: typeof tool.input === 'object' ? JSON.stringify(tool.input) : tool.input,
+ id: tool.id,
+ }));
  output['all-messages'] = {
  type: 'chat-message[]',
  value: [
@@ -415,15 +498,16 @@ export const ChatAnthropicNodeImpl = {
  {
  type: 'assistant',
  message: responseParts.join('').trim(),
- function_call: undefined,
- function_calls: undefined,
+ function_call: functionCalls.length === 1 ? functionCalls[0] : undefined,
+ function_calls: functionCalls.length > 0 ? functionCalls : undefined,
  },
  ],
  };
  context.onPartialOutputs?.(output);
  }
- [old lines 425-426 removed; content not shown in this view]
+ // Final validation
+ if (responseParts.length === 0 && toolCalls.length === 0) {
+ throw new Error('No response or tool calls received from Anthropic');
  }
  output['requestTokens'] = { type: 'number', value: requestTokens ?? tokenCountEstimate };
  const responseTokenCount = responseTokens ?? (await context.tokenizer.getTokenCountForString(responseParts.join(''), tokenizerInfo));
@@ -432,6 +516,7 @@ export const ChatAnthropicNodeImpl = {
  else {
  // Use the normal chat completion method for non-Claude 3 models
  const chunks = streamChatCompletions({
+ apiEndpoint,
  apiKey: apiKey ?? '',
  signal: context.signal,
  ...completionOptions,
@@ -513,22 +598,36 @@ export const ChatAnthropicNodeImpl = {
  };
  export const chatAnthropicNode = pluginNodeDefinition(ChatAnthropicNodeImpl, 'Chat');
  export function getSystemPrompt(inputs) {
- const
+ const systemInput = inputs['system'];
+ const system = coerceTypeOptional(systemInput, 'string');
  if (system) {
- return
+ return [
+ {
+ type: 'text',
+ text: system,
+ cache_control: systemInput?.type === 'chat-message'
+ ? systemInput.value.isCacheBreakpoint
+ ? { type: 'ephemeral' }
+ : null
+ : null,
+ },
+ ];
  }
  const prompt = inputs['prompt'];
  if (prompt && prompt.type === 'chat-message[]') {
- const
- if (
- [old line 524 removed; content not shown in this view]
- return
- [old lines 526-529 removed; content not shown in this view]
+ const systemMessages = prompt.value.filter((message) => message.type === 'system');
+ if (systemMessages.length) {
+ const converted = systemMessages.map((message) => {
+ return {
+ type: 'text',
+ text: coerceType({ type: 'chat-message', value: message }, 'string'),
+ cache_control: message.isCacheBreakpoint ? { type: 'ephemeral' } : null,
+ };
+ });
+ return converted;
  }
  }
+ return undefined;
  }
  function getChatMessages(inputs) {
  const prompt = inputs['prompt'];
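Note: getSystemPrompt now returns an array of text blocks instead of a plain string, so a cache_control marker can ride along with the system prompt. A minimal sketch of that shape is below; it is illustrative only, and SystemBlock/toSystemBlocks are assumed names, not package exports.

    interface SystemBlock {
      type: 'text';
      text: string;
      cache_control: { type: 'ephemeral' } | null;
    }

    function toSystemBlocks(system: string | undefined, isCacheBreakpoint = false): SystemBlock[] | undefined {
      if (!system) {
        return undefined;
      }
      return [
        {
          type: 'text',
          text: system,
          // A cache breakpoint asks the API to cache the prompt prefix up to this block.
          cache_control: isCacheBreakpoint ? { type: 'ephemeral' } : null,
        },
      ];
    }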
@@ -567,8 +666,7 @@ export async function chatMessagesToClaude3ChatMessages(chatMessages) {
  message.content[0].type === 'tool_result') {
  const last = acc.at(-1);
  if (last?.role === 'user' && Array.isArray(last.content) && last.content.every((c) => c.type === 'tool_result')) {
- const
- const content = last.content.concat(mc);
+ const content = last.content.concat(message.content);
  return [...acc.slice(0, -1), { ...last, content }];
  }
  }
@@ -592,12 +690,13 @@ async function chatMessageToClaude3ChatMessage(message) {
  type: 'tool_result',
  tool_use_id: message.name,
  content: content.length === 1 ? content[0].text : content,
+ cache_control: message.isCacheBreakpoint ? { type: 'ephemeral' } : null,
  },
  ],
  };
  }
  const content = Array.isArray(message.message)
- ? await Promise.all(message.message.map(chatMessageContentToClaude3ChatMessage))
+ ? await Promise.all(message.message.map((part) => chatMessageContentToClaude3ChatMessage(part)))
  : [await chatMessageContentToClaude3ChatMessage(message.message)];
  if (message.type === 'assistant' && message.function_calls) {
  content.push(...message.function_calls.map((fc) => ({
@@ -605,6 +704,7 @@ async function chatMessageToClaude3ChatMessage(message) {
  id: fc.id,
  name: fc.name,
  input: JSON.parse(fc.arguments),
+ cache_control: message.isCacheBreakpoint ? { type: 'ephemeral' } : null,
  })));
  }
  else if (message.type === 'assistant' && message.function_call) {
@@ -613,8 +713,13 @@ async function chatMessageToClaude3ChatMessage(message) {
  id: message.function_call.id,
  name: message.function_call.name,
  input: JSON.parse(message.function_call.arguments),
+ cache_control: message.isCacheBreakpoint ? { type: 'ephemeral' } : null,
  });
  }
+ // If the message is a cache breakpoint, cache using the last content item of the message
+ if (message.isCacheBreakpoint) {
+ content.at(-1).cache_control = { type: 'ephemeral' };
+ }
  return {
  role: message.type,
  content,
@@ -625,6 +730,7 @@ async function chatMessageContentToClaude3ChatMessage(content) {
  return {
  type: 'text',
  text: content,
+ cache_control: null, // set later
  };
  }
  switch (content.type) {
@@ -636,9 +742,23 @@ async function chatMessageContentToClaude3ChatMessage(content) {
  media_type: content.mediaType,
  data: (await uint8ArrayToBase64(content.data)) ?? '',
  },
+ cache_control: null, // set later
  };
  case 'url':
  throw new Error('unable to convert urls for Claude');
+ case 'document':
+ return {
+ type: 'document',
+ source: {
+ type: 'base64',
+ data: (await uint8ArrayToBase64(content.data)) ?? '',
+ media_type: content.mediaType,
+ },
+ title: content.title?.trim() ? content.title.trim() : undefined,
+ context: content.context?.trim() ? content.context.trim() : undefined,
+ citations: content.enableCitations ? { enabled: true } : undefined,
+ cache_control: null, // set later
+ };
  default:
  assertNever(content);
  }
package/dist/esm/plugins/anthropic/plugin.js

@@ -8,11 +8,19 @@ export const anthropicPlugin = {
  },
  configSpec: {
  anthropicApiKey: {
- type: '
+ type: 'secret',
  label: 'Anthropic API Key',
  description: 'The API key for the Anthropic service.',
  pullEnvironmentVariable: 'ANTHROPIC_API_KEY',
  helperText: 'You may also set the ANTHROPIC_API_KEY environment variable.',
  },
+ anthropicApiEndpoint: {
+ type: 'string',
+ label: 'Anthropic API Endpoint',
+ description: 'The API endpoint for the Anthropic service.',
+ pullEnvironmentVariable: 'ANTHROPIC_API_ENDPOINT',
+ helperText: 'Defaults to https://api.anthropic.com/v1. You may also set the ANTHROPIC_API_ENDPOINT environment variable.',
+ default: 'https://api.anthropic.com/v1',
+ },
  },
  };
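Note: together with the node-level Endpoint editor shown earlier, the new anthropicApiEndpoint setting gives a three-step resolution order: the node's own endpoint input/data if non-blank, otherwise the plugin setting (or ANTHROPIC_API_ENDPOINT), otherwise https://api.anthropic.com/v1. A one-function sketch follows; resolveApiEndpoint is an assumed helper name for illustration, not package code.

    function resolveApiEndpoint(nodeEndpoint: string | undefined, configuredEndpoint: string | undefined): string {
      // Plugin setting (or env var) falls back to the public default.
      const defaultEndpoint = configuredEndpoint || 'https://api.anthropic.com/v1';
      // A non-blank node-level endpoint overrides everything else.
      return nodeEndpoint?.trim() ? nodeEndpoint : defaultEndpoint;
    }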
package/dist/esm/plugins/gentrace/plugin.js

@@ -49,9 +49,9 @@ export const runGentraceTests = async (gentracePipelineSlug, settings, project,
  }, rivetFormattedInputs);
  const fullRecording = recorder.getRecording();
  const stepRuns = convertRecordingToStepRuns(fullRecording, project, graphId);
- [old line 52 removed; content not shown in this view]
- runner.addStepRunNode(stepRun);
- }
+ for (const stepRun of stepRuns) {
+ await runner.addStepRunNode(stepRun);
+ }
  if (stepRuns.length === 0) {
  throw new Error('No Rivet steps found. You need operations which are not Graph Input or Graph Output nodes.');
  }
@@ -77,9 +77,9 @@ export const runRemoteGentraceTests = async (gentracePipelineSlug, settings, pro
  const rivetFormattedInputs = mapValues(testCase.inputs, inferType);
  const fullRecording = await runAndRecord(rivetFormattedInputs);
  const stepRuns = convertRecordingToStepRuns(fullRecording, project, graphId);
- [old line 80 removed; content not shown in this view]
- runner.addStepRunNode(stepRun);
- }
+ for (const stepRun of stepRuns) {
+ await runner.addStepRunNode(stepRun);
+ }
  if (stepRuns.length === 0) {
  throw new Error('No Rivet steps found. You need operations which are not Graph Input or Graph Output nodes.');
  }