@alpic80/rivet-core 1.24.0-aidon.1 → 1.24.0-aidon.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/bundle.cjs +642 -420
- package/dist/cjs/bundle.cjs.map +4 -4
- package/dist/esm/integrations/DatasetProvider.js +1 -1
- package/dist/esm/model/GraphProcessor.js +1 -3
- package/dist/esm/model/Nodes.js +3 -0
- package/dist/esm/model/nodes/ChatNodeBase.js +10 -8
- package/dist/esm/model/nodes/CodeNode.js +0 -1
- package/dist/esm/model/nodes/CronNode.js +1 -1
- package/dist/esm/model/nodes/GetAllDatasetsNode.js +1 -1
- package/dist/esm/model/nodes/LoopControllerNode.js +1 -1
- package/dist/esm/model/nodes/ObjectNode.js +1 -1
- package/dist/esm/model/nodes/RaceInputsNode.js +1 -2
- package/dist/esm/model/nodes/ReplaceDatasetNode.js +1 -1
- package/dist/esm/model/nodes/SliceNode.js +0 -1
- package/dist/esm/model/nodes/SplitNode.js +1 -1
- package/dist/esm/model/nodes/SubGraphNode.js +0 -1
- package/dist/esm/model/nodes/WriteFileNode.js +147 -0
- package/dist/esm/native/BrowserNativeApi.js +16 -1
- package/dist/esm/plugins/aidon/nodes/ChatAidonNode.js +3 -3
- package/dist/esm/plugins/anthropic/anthropic.js +0 -4
- package/dist/esm/plugins/anthropic/nodes/ChatAnthropicNode.js +0 -3
- package/dist/esm/plugins/assemblyAi/LemurQaNode.js +1 -1
- package/dist/esm/plugins/assemblyAi/LemurSummaryNode.js +1 -1
- package/dist/esm/plugins/google/google.js +10 -4
- package/dist/esm/plugins/google/nodes/ChatGoogleNode.js +10 -4
- package/dist/esm/plugins/openai/nodes/ThreadMessageNode.js +1 -1
- package/dist/esm/recording/ExecutionRecorder.js +54 -3
- package/dist/esm/utils/base64.js +13 -0
- package/dist/esm/utils/coerceType.js +3 -0
- package/dist/types/integrations/DatasetProvider.d.ts +1 -1
- package/dist/types/model/Nodes.d.ts +3 -2
- package/dist/types/model/nodes/ChatNodeBase.d.ts +1 -1
- package/dist/types/model/nodes/GetAllDatasetsNode.d.ts +2 -2
- package/dist/types/model/nodes/ObjectNode.d.ts +2 -2
- package/dist/types/model/nodes/RaceInputsNode.d.ts +1 -2
- package/dist/types/model/nodes/SplitNode.d.ts +2 -2
- package/dist/types/model/nodes/WriteFileNode.d.ts +23 -0
- package/dist/types/native/BrowserNativeApi.d.ts +8 -5
- package/dist/types/native/NativeApi.d.ts +12 -1
- package/dist/types/plugins/google/google.d.ts +8 -0
- package/dist/types/recording/RecordedEvents.d.ts +1 -0
- package/dist/types/utils/base64.d.ts +1 -0
- package/dist/types/utils/serialization/serialization_v3.d.ts +1 -0
- package/package.json +2 -2
@@ -9,7 +9,7 @@ export class InMemoryDatasetProvider {
  const dataset = this.#datasets.find((d) => d.meta.id === id);
  return dataset?.meta;
  }
- async getDatasetsForProject(
+ async getDatasetsForProject() {
  return this.#datasets.map((d) => d.meta);
  }
  async getDatasetData(id) {
@@ -76,6 +76,7 @@ export class GraphProcessor {
  #loadedProjects = undefined;
  #definitions = undefined;
  #scc = undefined;
+ // @ts-expect-error
  #nodesNotInCycle = undefined;
  #nodeAbortControllers = new Map();
  /** User input nodes that are pending user input. */
@@ -1394,9 +1395,6 @@ export class GraphProcessor {
  }
  return sccs;
  }
- #nodeIsInCycle(nodeId) {
- return this.#nodesNotInCycle.find((node) => node.id === nodeId) == null;
- }
  #nodesAreInSameCycle(a, b) {
  return this.#scc.find((cycle) => cycle.find((node) => node.id === a) && cycle.find((node) => node.id === b));
  }
package/dist/esm/model/Nodes.js CHANGED
@@ -20,6 +20,8 @@ import { readDirectoryNode } from './nodes/ReadDirectoryNode.js';
  export * from './nodes/ReadDirectoryNode.js';
  import { readFileNode } from './nodes/ReadFileNode.js';
  export * from './nodes/ReadFileNode.js';
+ import { writeFileNode } from './nodes/WriteFileNode.js';
+ export * from './nodes/WriteFileNode.js';
  import { ifElseNode } from './nodes/IfElseNode.js';
  export * from './nodes/IfElseNode.js';
  import { chunkNode } from './nodes/ChunkNode.js';
@@ -175,6 +177,7 @@ export const registerBuiltInNodes = (registry) => {
  .register(ifNode)
  .register(readDirectoryNode)
  .register(readFileNode)
+ .register(writeFileNode)
  .register(ifElseNode)
  .register(chunkNode)
  .register(graphInputNode)
@@ -945,10 +945,10 @@ export const ChatNodeBase = {
  ? costs.audioCompletion
  : 0
  : 0;
- const promptCost = getCostForTokens(response.usage.prompt_tokens_details.text_tokens,
- const completionCost = getCostForTokens(response.usage.completion_tokens_details.text_tokens,
- const audioPromptCost = getCostForTokens(response.usage.prompt_tokens_details.audio_tokens,
- const audioCompletionCost = getCostForTokens(response.usage.completion_tokens_details.audio_tokens,
+ const promptCost = getCostForTokens(response.usage.prompt_tokens_details.text_tokens, promptCostPerThousand);
+ const completionCost = getCostForTokens(response.usage.completion_tokens_details.text_tokens, completionCostPerThousand);
+ const audioPromptCost = getCostForTokens(response.usage.prompt_tokens_details.audio_tokens, audioPromptCostPerThousand);
+ const audioCompletionCost = getCostForTokens(response.usage.completion_tokens_details.audio_tokens, audioCompletionCostPerThousand);
  output['cost'] = {
  type: 'number',
  value: promptCost + completionCost + audioPromptCost + audioCompletionCost,
@@ -1124,8 +1124,8 @@ export const ChatNodeBase = {
  : outputTokenCount;
  const promptCostPerThousand = model in openaiModels ? openaiModels[model].cost.prompt : 0;
  const completionCostPerThousand = model in openaiModels ? openaiModels[model].cost.completion : 0;
- const promptCost = getCostForTokens(inputTokenCount,
- const completionCost = getCostForTokens(outputTokensForCostCalculation,
+ const promptCost = getCostForTokens(inputTokenCount, promptCostPerThousand);
+ const completionCost = getCostForTokens(outputTokensForCostCalculation, completionCostPerThousand);
  const cost = promptCost + completionCost;
  if (usage) {
  output['usage'] = {
@@ -1177,7 +1177,9 @@ export const ChatNodeBase = {
  context.trace(`ChatNode failed, retrying: ${err.toString()}`);
  const { retriesLeft } = err;
  // Retry network errors
- if (err.toString().includes('terminated') ||
+ if (err.toString().includes('terminated') ||
+ originalError.toString().includes('terminated') ||
+ err.toString().includes('fetch failed')) {
  return;
  }
  if (!(err instanceof OpenAIError)) {
@@ -1256,7 +1258,7 @@ export function getChatNodeMessages(inputs)
  }
  return { messages, systemPrompt };
  }
- export function getCostForTokens(tokenCount,
+ export function getCostForTokens(tokenCount, costPerThousand) {
  return (tokenCount / 1000) * costPerThousand;
  }
  function audioFormatToMediaType(format) {
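
For context on the getCostForTokens changes above: every call site now passes an explicit per-thousand rate as the second argument, matching the repaired signature. A minimal TypeScript sketch of the arithmetic, using made-up rates for illustration only (the real values come from the model cost tables such as openaiModels[model].cost):

// The repaired helper, as it appears in ChatNodeBase.js above.
export function getCostForTokens(tokenCount: number, costPerThousand: number): number {
  return (tokenCount / 1000) * costPerThousand;
}

// Hypothetical per-thousand-token rates, for illustration only.
const promptCostPerThousand = 0.0025;
const completionCostPerThousand = 0.01;

// 1,200 prompt tokens and 300 completion tokens:
const promptCost = getCostForTokens(1200, promptCostPerThousand);        // 0.003
const completionCost = getCostForTokens(300, completionCostPerThousand); // 0.003
const cost = promptCost + completionCost;                                // 0.006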
@@ -1,7 +1,6 @@
  import {} from '../NodeBase.js';
  import { nanoid } from 'nanoid/non-secure';
  import { NodeImpl } from '../NodeImpl.js';
- import {} from '../DataValue.js';
  import { dedent } from 'ts-dedent';
  import {} from '../EditorDefinition.js';
  import {} from '../NodeBodySpec.js';
@@ -2,7 +2,7 @@ import { nanoid } from 'nanoid';
  import { NodeImpl } from '../NodeImpl.js';
  import { dedent } from 'ts-dedent';
  import { nodeDefinition } from '../NodeDefinition.js';
- import {
+ import { coerceTypeOptional } from '../../utils/coerceType.js';
  import * as cronParser from 'cron-parser';
  export class CronNodeImpl extends NodeImpl {
  static create() {
@@ -36,7 +36,7 @@ export class GetAllDatasetsNodeImpl extends NodeImpl {
  getEditors() {
  return [];
  }
- async process(
+ async process(context) {
  const { datasetProvider } = context;
  if (datasetProvider == null) {
  throw new Error('datasetProvider is required');
@@ -7,7 +7,7 @@ import { coerceType } from '../../utils/coerceType.js';
  import {} from '../ProcessContext.js';
  import { dedent } from 'ts-dedent';
  import {} from '../../index.js';
- import { entries
+ import { entries } from '../../utils/typeSafety.js';
  export class LoopControllerNodeImpl extends NodeImpl {
  static create() {
  const chartNode = {
@@ -25,7 +25,7 @@ export class ObjectNodeImpl extends NodeImpl {
  };
  return chartNode;
  }
- getInputDefinitions(
+ getInputDefinitions() {
  // Extract inputs from text, everything like {{input}}
  const inputNames = [...new Set(this.chartNode.data.jsonTemplate.match(/\{\{([^}]+)\}\}/g))];
  return (inputNames?.map((inputName) => {
@@ -3,7 +3,6 @@ import { nanoid } from 'nanoid/non-secure';
  import { NodeImpl } from '../NodeImpl.js';
  import { nodeDefinition } from '../NodeDefinition.js';
  import {} from '../GraphProcessor.js';
- import {} from '../ProcessContext.js';
  import { dedent } from 'ts-dedent';
  import {} from '../../index.js';
  export class RaceInputsNodeImpl extends NodeImpl {
@@ -67,7 +66,7 @@ export class RaceInputsNodeImpl extends NodeImpl {
  group: ['Logic'],
  };
  }
- async process(inputs
+ async process(inputs) {
  // GraphProcessor handles most of the racing/aborting logic for us.
  const value = Object.entries(inputs).find(([key, value]) => key.startsWith('input') && value !== undefined && value.type !== 'control-flow-excluded');
  if (!value) {
@@ -3,7 +3,7 @@ import { nanoid } from 'nanoid/non-secure';
  import { dedent } from 'ts-dedent';
  import { nodeDefinition } from '../NodeDefinition.js';
  import { getInputOrData, coerceType, newId, inferType } from '../../utils/index.js';
- import {
+ import { unwrapDataValue } from '../DataValue.js';
  export class ReplaceDatasetNodeImpl extends NodeImpl {
  static create() {
  return {
@@ -3,7 +3,6 @@ import { nanoid } from 'nanoid/non-secure';
  import { NodeImpl } from '../NodeImpl.js';
  import { nodeDefinition } from '../NodeDefinition.js';
  import {} from '../GraphProcessor.js';
- import { entries } from '../../utils/typeSafety.js';
  import { dedent } from 'ts-dedent';
  import {} from '../EditorDefinition.js';
  import {} from '../../index.js';
@@ -85,7 +85,7 @@ export class SplitNodeImpl extends NodeImpl {
  }
  return normalized;
  }
- async process(inputs
+ async process(inputs) {
  const delimiter = getInputOrData(this.data, inputs, 'delimiter');
  const normalizedDelimiter = this.data.regex ? new RegExp(delimiter) : handleEscapeCharacters(delimiter);
  const stringToSplit = coerceType(inputs['string'], 'string');
@@ -12,7 +12,6 @@ import {} from '../ProcessContext.js';
  import {} from '../../index.js';
  import { dedent } from 'ts-dedent';
  import { getError } from '../../utils/errors.js';
- import { match } from 'ts-pattern';
  export class SubGraphNodeImpl extends NodeImpl {
  static create() {
  const chartNode = {
@@ -0,0 +1,147 @@
+ import {} from '../NodeBase.js';
+ import {} from '../DataValue.js';
+ import { NodeImpl } from '../NodeImpl.js';
+ import { nodeDefinition } from '../NodeDefinition.js';
+ import { nanoid } from 'nanoid/non-secure';
+ import { getInputOrData } from '../../utils/index.js';
+ import {} from '../ProcessContext.js';
+ import { dedent } from 'ts-dedent';
+ import { coerceTypeOptional } from '../../utils/coerceType.js';
+ import { extractInterpolationVariables, interpolate } from '../../utils/interpolation.js';
+ const mimeToExtension = {
+ 'image/jpeg': 'jpg',
+ 'image/png': 'png',
+ 'image/gif': 'gif',
+ 'audio/wav': 'wav',
+ 'audio/mpeg ': 'mp3',
+ 'audio/mp3': 'mp3',
+ 'audio/ogg': 'ogg'
+ };
+ export class WriteFileNodeImpl extends NodeImpl {
+ static create() {
+ return {
+ id: nanoid(),
+ type: 'writeFile',
+ title: 'Write File',
+ visualData: { x: 0, y: 0, width: 250 },
+ data: {
+ path: '',
+ asBinary: false,
+ usePathOutput: true,
+ overwriteExistingFile: false,
+ },
+ };
+ }
+ getInputDefinitions() {
+ const inputDefinitions = [
+ {
+ id: 'content',
+ title: 'Content',
+ dataType: this.data.asBinary ? 'binary' : 'string',
+ },
+ ];
+ if (this.chartNode.data.usePathOutput) {
+ inputDefinitions.push({
+ id: 'path',
+ title: 'Path',
+ dataType: 'string',
+ coerced: false,
+ });
+ }
+ return inputDefinitions;
+ }
+ getOutputDefinitions() {
+ return [
+ {
+ id: 'outputContent',
+ title: 'Content',
+ dataType: this.data.asBinary ? 'binary' : 'string',
+ },
+ ];
+ }
+ static getUIData() {
+ return {
+ infoBoxBody: dedent `
+ Writes the contents of the specified file and outputs it as a string.
+ `,
+ infoBoxTitle: 'Write File Node',
+ contextMenuTitle: 'Write File',
+ group: ['Input/Output'],
+ };
+ }
+ getEditors() {
+ return [
+ {
+ type: 'string',
+ label: 'Path',
+ dataKey: 'path',
+ useInputToggleDataKey: 'usePathOutput',
+ },
+ {
+ type: 'toggle',
+ label: 'Overwrite Existing File instead of making additional copy',
+ dataKey: 'overwriteExistingFile',
+ },
+ {
+ type: 'toggle',
+ label: 'Read as Binary',
+ dataKey: 'asBinary',
+ },
+ ];
+ }
+ getBody() {
+ return dedent `
+ ${this.data.asBinary ? 'Write as Binary' : 'Write as Text'}
+ ${this.data.usePathOutput ? '' : `Path: ${this.data.path}`}
+ `;
+ }
+ async process(inputData, context) {
+ const { nativeApi } = context;
+ const inputContent = inputData['content'] ?? { type: 'any', value: undefined };
+ if (nativeApi == null) {
+ throw new Error('This node requires a native API to run.');
+ }
+ const currentPath = context.project.metadata?.path;
+ if (!currentPath) {
+ throw new Error('Project metadata is missing path.');
+ }
+ const folderPath = currentPath.replace('.rivet-project', '.rivet-files');
+ await nativeApi.createdir(folderPath, true);
+ let path = getInputOrData(this.chartNode.data, inputData, 'path');
+ const interpolations = extractInterpolationVariables(path);
+ if (interpolations.includes('ext')) {
+ let extension = 'txt';
+ if (this.data.asBinary) {
+ if (inputContent.type === 'audio') {
+ extension = mimeToExtension[inputContent.value.mediaType ?? ''] ?? 'audio';
+ }
+ else if (inputContent.type === 'image') {
+ extension = mimeToExtension[inputContent.value.mediaType ?? ''] ?? 'image';
+ }
+ else {
+ extension = 'binary';
+ }
+ }
+ path = interpolate(path, { ext: extension });
+ }
+ let fileDestination = await nativeApi.join(folderPath, path);
+ if (!this.data.overwriteExistingFile) {
+ fileDestination = await nativeApi.uniqueFilename(fileDestination);
+ }
+ if (this.data.asBinary) {
+ const content = coerceTypeOptional(inputContent, 'binary') ?? new Uint8Array();
+ await nativeApi.writeBinaryFile(fileDestination, content);
+ return {
+ ['outputContent']: { type: 'binary', value: content },
+ };
+ }
+ else {
+ const content = coerceTypeOptional(inputContent, 'string') ?? '';
+ await nativeApi.writeTextFile(fileDestination, content);
+ return {
+ ['outputContent']: { type: 'string', value: content },
+ };
+ }
+ }
+ }
+ export const writeFileNode = nodeDefinition(WriteFileNodeImpl, 'Write File');
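
The new Write File node's process() derives its destination from the project path and the input's media type. Below is a hedged standalone TypeScript sketch of that same path logic (a sibling .rivet-files folder, a {{ext}} placeholder filled from the media type, and a unique name unless overwriting); the helpers here are plain stand-ins for the nativeApi.join and nativeApi.uniqueFilename calls the node actually delegates to.

// Illustration only; in the node these steps go through context.nativeApi.
const mimeToExtension: Record<string, string> = { 'image/png': 'png', 'audio/ogg': 'ogg' };

function resolveDestination(opts: {
  projectPath: string;                         // context.project.metadata.path
  pathTemplate: string;                        // the node's 'path' data or input, may contain {{ext}}
  asBinary: boolean;
  input: { type: string; mediaType?: string }; // simplified stand-in for a DataValue
  overwriteExistingFile: boolean;
  exists: (p: string) => boolean;              // stand-in for a filesystem check
}): string {
  const folder = opts.projectPath.replace('.rivet-project', '.rivet-files');
  let ext = 'txt';
  if (opts.asBinary) {
    ext =
      opts.input.type === 'audio' || opts.input.type === 'image'
        ? mimeToExtension[opts.input.mediaType ?? ''] ?? opts.input.type
        : 'binary';
  }
  let dest = `${folder}/${opts.pathTemplate.replace('{{ext}}', ext)}`;
  if (!opts.overwriteExistingFile) {
    // Stand-in for nativeApi.uniqueFilename: add a numeric suffix until the name is free.
    const base = dest;
    for (let i = 1; opts.exists(dest); i++) {
      dest = `${base}.${i}`;
    }
  }
  return dest;
}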
@@ -1,6 +1,9 @@
  import {} from './BaseDir.js';
  import {} from './NativeApi.js';
  export class BrowserNativeApi {
+ createdir(_path, _recursive, _baseDir) {
+ throw new Error('Method not implemented.');
+ }
  readdir(_path, _baseDir) {
  throw new Error('Method not implemented.');
  }
@@ -13,7 +16,19 @@ export class BrowserNativeApi {
  writeTextFile(_path, _data, _baseDir) {
  throw new Error('Method not implemented.');
  }
-
+ writeBinaryFile(_path, _data, _baseDir) {
+ throw new Error('Method not implemented.');
+ }
+ exists(_path, _baseDir) {
+ throw new Error('Method not implemented.');
+ }
+ join(..._paths) {
+ throw new Error('Method not implemented.');
+ }
+ uniqueFilename(_path, _baseDir) {
+ throw new Error('Method not implemented.');
+ }
+ exec() {
  throw new Error('Method not supported.');
  }
  }
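
The hunks above extend the NativeApi surface with createdir, writeBinaryFile, exists, join, and uniqueFilename, all stubbed as "not implemented" in the browser shim. As a rough idea of what a Node.js-backed implementation of just these methods could look like, here is a hedged TypeScript sketch; the class name is invented, the signatures are inferred from the stubs, and this is not the package's actual native API implementation.

// Hypothetical Node.js-backed sketch of the new NativeApi methods; signatures are
// inferred from the BrowserNativeApi stubs in the diff, not copied from the package.
import { mkdir, writeFile, access } from 'node:fs/promises';
import { join as joinPath, parse, format } from 'node:path';

class NodeFsNativeApiSketch {
  async createdir(path: string, recursive = false): Promise<void> {
    await mkdir(path, { recursive });
  }

  async writeBinaryFile(path: string, data: Uint8Array): Promise<void> {
    await writeFile(path, data);
  }

  async exists(path: string): Promise<boolean> {
    try {
      await access(path);
      return true;
    } catch {
      return false;
    }
  }

  async join(...paths: string[]): Promise<string> {
    return joinPath(...paths);
  }

  async uniqueFilename(path: string): Promise<string> {
    // Append " (n)" before the extension until the name is unused.
    const { dir, name, ext } = parse(path);
    let candidate = path;
    for (let n = 1; await this.exists(candidate); n++) {
      candidate = format({ dir, ext, name: `${name} (${n})` });
    }
    return candidate;
  }
}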
@@ -1,10 +1,10 @@
- import { ChatNodeImpl,
+ import { ChatNodeImpl, globalRivetNodeRegistry, } from '../../../index.js';
  import { omit } from 'lodash-es';
  import { dedent } from 'ts-dedent';
  import { coerceTypeOptional } from '../../../utils/coerceType.js';
  import { pluginNodeDefinition } from '../../../model/NodeDefinition.js';
  // Temporary
- const cache = new Map();
+ // const cache = new Map<string, Outputs>();
  const registry = globalRivetNodeRegistry;
  ;
  class ChatAidonNodeImpl extends ChatNodeImpl {
@@ -91,7 +91,7 @@ class ChatAidonNodeImpl extends ChatNodeImpl {
  };
  }
  const fullUrl = schemaDetail.url + path;
- const bodyContent = parsedArgs.requestBody
+ const bodyContent = parsedArgs.requestBody ?? parsedArgs;
  const requestInit = {
  method: "POST",
  headers,
@@ -102,14 +102,12 @@ export async function* streamChatCompletions({ apiEndpoint, apiKey, signal, ...r
  signal: signal ?? defaultSignal,
  });
  let hadChunks = false;
- let nextDataType;
  for await (const chunk of response.events()) {
  hadChunks = true;
  if (chunk === '[DONE]') {
  return;
  }
  else if (/\[\w+\]/.test(chunk)) {
- nextDataType = chunk.slice(1, -1);
  continue;
  }
  let data;
@@ -170,14 +168,12 @@ export async function* streamMessageApi({ apiEndpoint, apiKey, signal, beta, ...
  signal: signal ?? defaultSignal,
  });
  let hadChunks = false;
- let nextDataType;
  for await (const chunk of response.events()) {
  hadChunks = true;
  if (chunk === '[message_stop]') {
  return;
  }
  else if (/^\[\w+\]$/.test(chunk)) {
- nextDataType = chunk.slice(1, -1);
  continue;
  }
  let data;
@@ -321,9 +321,6 @@ export const ChatAnthropicNodeImpl = {
  prompt += '\n\nAssistant:';
  // Get the "System" prompt input for Claude 3 models
  const system = data.model.startsWith('claude-3') ? getSystemPrompt(inputs) : undefined;
- const systemInput = inputs['system'];
- const includesCacheBreakpoint = rivetChatMessages.some((m) => m.isCacheBreakpoint) ||
- (systemInput?.type === 'chat-message' && systemInput.value.isCacheBreakpoint);
  let { maxTokens } = data;
  const tokenizerInfo = {
  node: context.node,
@@ -4,7 +4,7 @@ import {} from '../../index.js';
  import { getClient, getLemurParams, lemurEditorDefinitions, lemurInputDefinitions, } from './lemurHelpers.js';
  import { coerceType } from '../../utils/coerceType.js';
  import { pluginNodeDefinition } from '../../model/NodeDefinition.js';
- import {
+ import {} from 'assemblyai';
  export const LemurQaNodeImpl = {
  create() {
  const chartNode = {
@@ -1,6 +1,6 @@
  import { nanoid } from 'nanoid/non-secure';
  import { dedent } from 'ts-dedent';
- import {
+ import {} from 'assemblyai';
  import {} from '../../index.js';
  import { getClient, getLemurParams, lemurEditorDefinitions, lemurInputDefinitions, } from './lemurHelpers.js';
  import { pluginNodeDefinition } from '../../model/NodeDefinition.js';
@@ -1,5 +1,4 @@
- import {
- import { P, match } from 'ts-pattern';
+ import {} from '@google/generative-ai';
  export const googleModelsDeprecated = {
  'gemini-pro': {
  maxTokens: 32760,
@@ -35,6 +34,14 @@ export const generativeAiGoogleModels = {
  },
  displayName: 'Gemini 2.0 Pro',
  },
+ 'gemini-2.5-pro-exp-03-25': {
+ maxTokens: 1000000,
+ cost: {
+ prompt: 0, // Unknown
+ completion: 0, // Unknown
+ },
+ displayName: 'Gemini 2.5 Pro Experimental',
+ },
  'gemini-2.0-flash-lite-preview-02-05': {
  maxTokens: 1048576,
  cost: {
@@ -121,7 +128,7 @@ export async function* streamGenerativeAi({ apiKey, model, systemPrompt, prompt,
  outChunk.function_calls = functionCalls;
  }
  if (chunk.candidates) {
- outChunk.completion = chunk.candidates[0]?.content?.parts[0]?.text;
+ outChunk.completion = chunk.candidates[0]?.content?.parts?.[0]?.text;
  outChunk.finish_reason = chunk.candidates[0]?.finishReason;
  }
  if (outChunk.completion || outChunk.function_calls) {
@@ -130,7 +137,6 @@ export async function* streamGenerativeAi({ apiKey, model, systemPrompt, prompt,
  }
  }
  export async function* streamChatCompletions({ project, location, applicationCredentials, model, signal, max_output_tokens, temperature, top_p, top_k, prompt, }) {
- const defaultSignal = new AbortController().signal;
  // If you import normally, the Google auth library throws a fit.
  const { VertexAI } = await import('@google-cloud/vertexai');
  // Can't find a way to pass the credentials path in
@@ -11,7 +11,7 @@ import { uint8ArrayToBase64 } from '../../../utils/base64.js';
  import { pluginNodeDefinition } from '../../../model/NodeDefinition.js';
  import { getScalarTypeOf, isArrayDataValue } from '../../../model/DataValue.js';
  import { getInputOrData } from '../../../utils/inputs.js';
- import {
+ import { SchemaType, } from '@google/generative-ai';
  import { mapValues } from 'lodash-es';
  // Temporary
  const cache = new Map();
@@ -254,7 +254,10 @@ export const ChatGoogleNodeImpl = {
  }
  }
  return {
- role: message.type
+ role: match(message.type)
+ .with('user', () => 'user')
+ .with('assistant', () => 'model')
+ .exhaustive(),
  parts,
  };
  }
@@ -486,8 +489,11 @@ export const ChatGoogleNodeImpl = {
  });
  }
  catch (error) {
-
-
+ const raisedError = getError(error);
+ context.trace(raisedError.stack ?? 'Missing stack');
+ const err = new Error(`Error processing ChatGoogleNode: ${raisedError.message}`);
+ err.cause = raisedError;
+ throw err;
  }
  },
  };
@@ -1,5 +1,5 @@
  import {} from '../../../index.js';
- import {
+ import { newId, coerceTypeOptional, getInputOrData, coerceType } from '../../../utils/index.js';
  import { interpolate } from '../../../utils/interpolation.js';
  import { pluginNodeDefinition } from '../../../model/NodeDefinition.js';
  import { mapValues } from 'lodash-es';
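
Two of the ChatGoogleNode changes above are worth calling out: the message role is now mapped through an exhaustive ts-pattern match (Rivet's 'assistant' becomes Google's 'model' role), and failures are rethrown wrapped in an Error with the original attached as cause. A minimal standalone TypeScript sketch of the role mapping, assuming only that ts-pattern is available:

import { match } from 'ts-pattern';

type RivetRole = 'user' | 'assistant';

// 'assistant' maps to Google's 'model' role; .exhaustive() fails fast if a new role type appears.
function toGoogleRole(role: RivetRole): 'user' | 'model' {
  return match(role)
    .with('user', () => 'user' as const)
    .with('assistant', () => 'model' as const)
    .exhaustive();
}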
@@ -1,6 +1,8 @@
  import { nanoid } from 'nanoid/non-secure';
  import {} from '../index.js';
  import Emittery from 'emittery';
+ import { uint8ArrayToBase64Sync, base64ToUint8Array } from '../utils/base64.js';
+ import { isPlainObject } from 'lodash-es';
  const toRecordedEventMap = {
  graphStart: ({ graph, inputs }) => ({ graphId: graph.metadata.id, inputs }),
  graphFinish: ({ graph, outputs }) => ({ graphId: graph.metadata.id, outputs }),
@@ -93,6 +95,25 @@ function toRecordedEvent(event, data) {
  ts: Date.now(),
  };
  }
+ function mapValuesDeep(obj, fn) {
+ if (Array.isArray(obj)) {
+ return obj.map((value) => {
+ if (isPlainObject(value) || Array.isArray(value)) {
+ return mapValuesDeep(value, fn);
+ }
+ return fn(value);
+ });
+ }
+ if (isPlainObject(obj)) {
+ return Object.fromEntries(Object.entries(obj).map(([key, value]) => {
+ if (isPlainObject(value) || Array.isArray(value)) {
+ return [key, mapValuesDeep(value, fn)];
+ }
+ return [key, fn(value)];
+ }));
+ }
+ return fn(obj);
+ }
  export class ExecutionRecorder {
  #events = [];
  recordingId;
@@ -109,7 +130,7 @@ export class ExecutionRecorder {
  off = undefined;
  once = undefined;
  recordSocket(channel) {
- return new Promise((resolve
+ return new Promise((resolve) => {
  this.recordingId = nanoid();
  const listener = (event) => {
  const { message, data } = JSON.parse(event.data);
@@ -167,15 +188,45 @@ export class ExecutionRecorder {
  if (serializedRecording.version !== 1) {
  throw new Error('Unsupported serialized events version');
  }
-
-
+ const recording = mapValuesDeep(serializedRecording.recording, (val) => {
+ if (typeof val === 'string' && val.startsWith('$ASSET:')) {
+ const id = val.slice('$ASSET:'.length);
+ const asset = serializedRecording.assets?.[id];
+ if (asset) {
+ return new Uint8Array(base64ToUint8Array(asset));
+ }
+ else {
+ return val;
+ }
+ }
+ return val;
+ });
+ recorder.recordingId = recording.recordingId;
+ recorder.#events = recording.events;
  return recorder;
  }
  serialize() {
  const serialized = {
  version: 1,
  recording: this.getRecording(),
+ assets: {},
  };
+ serialized.recording = mapValuesDeep(serialized.recording, (val) => {
+ if (val instanceof Uint8Array) {
+ const asString = uint8ArrayToBase64Sync(val);
+ const existingAsset = Object.entries(serialized.assets).find(([, asset]) => asset === asString);
+ if (!existingAsset) {
+ const id = nanoid();
+ serialized.assets[id] = asString;
+ return `$ASSET:${id}`;
+ }
+ else {
+ const [id] = existingAsset;
+ return `$ASSET:${id}`;
+ }
+ }
+ return val;
+ });
  return JSON.stringify(serialized);
  }
  }
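
The ExecutionRecorder serialization above replaces inline Uint8Array data with $ASSET:<id> references plus a base64 assets map, and reverses the substitution on load. A simplified TypeScript sketch of that round trip; the walk helpers and the Buffer-based encode/decode below stand in for mapValuesDeep, uint8ArrayToBase64Sync, and base64ToUint8Array, and the dedup of identical assets is omitted.

// Simplified round-trip illustration of the $ASSET substitution.
const assets: Record<string, string> = {};

function toAssetRefs(value: unknown): unknown {
  if (value instanceof Uint8Array) {
    const id = `asset-${Object.keys(assets).length + 1}`; // nanoid() in the real code
    assets[id] = Buffer.from(value).toString('base64');
    return `$ASSET:${id}`;
  }
  if (Array.isArray(value)) return value.map(toAssetRefs);
  if (value && typeof value === 'object') {
    return Object.fromEntries(Object.entries(value as Record<string, unknown>).map(([k, v]) => [k, toAssetRefs(v)]));
  }
  return value;
}

function fromAssetRefs(value: unknown): unknown {
  if (typeof value === 'string' && value.startsWith('$ASSET:')) {
    const asset = assets[value.slice('$ASSET:'.length)];
    return asset !== undefined ? new Uint8Array(Buffer.from(asset, 'base64')) : value;
  }
  if (Array.isArray(value)) return value.map(fromAssetRefs);
  if (value && typeof value === 'object') {
    return Object.fromEntries(Object.entries(value as Record<string, unknown>).map(([k, v]) => [k, fromAssetRefs(v)]));
  }
  return value;
}

// Binary event data survives JSON.stringify as a compact "$ASSET:<id>" reference:
const serialized = JSON.stringify(toAssetRefs({ events: [{ data: new Uint8Array([1, 2, 3]) }] }));
const restored = fromAssetRefs(JSON.parse(serialized));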
package/dist/esm/utils/base64.js CHANGED
@@ -14,6 +14,19 @@ export async function uint8ArrayToBase64(uint8Array) {
  return dataUrl.split(',')[1];
  }
  }
+ export function uint8ArrayToBase64Sync(uint8Array) {
+ if (typeof window === 'undefined') {
+ // Node executor
+ return Buffer.from(uint8Array).toString('base64');
+ }
+ else {
+ // Browser executor
+ const binary = Array.from(uint8Array)
+ .map((byte) => String.fromCharCode(byte))
+ .join('');
+ return btoa(binary);
+ }
+ }
  export function base64ToUint8Array(base64) {
  const binaryString = atob(base64);
  const len = binaryString.length;