@aigne/core 1.0.3-beta.1 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/cjs/index.d.ts +12 -9
- package/lib/cjs/index.js +12 -9
- package/lib/cjs/llm-models/gemini-llm-model.d.ts +26 -0
- package/lib/cjs/llm-models/gemini-llm-model.js +195 -0
- package/lib/cjs/llm-models/openai-llm-model.d.ts +25 -0
- package/lib/cjs/llm-models/openai-llm-model.js +120 -0
- package/lib/cjs/runtime.d.ts +55 -0
- package/lib/cjs/runtime.js +144 -0
- package/lib/cjs/utils/index.d.ts +1 -0
- package/lib/cjs/utils/index.js +1 -0
- package/lib/cjs/utils/partial.d.ts +3 -0
- package/lib/cjs/utils/partial.js +2 -0
- package/lib/dts/index.d.ts +12 -9
- package/lib/dts/llm-models/gemini-llm-model.d.ts +26 -0
- package/lib/dts/llm-models/openai-llm-model.d.ts +25 -0
- package/lib/dts/runtime.d.ts +55 -0
- package/lib/dts/utils/index.d.ts +1 -0
- package/lib/dts/utils/partial.d.ts +3 -0
- package/lib/esm/index.d.ts +12 -9
- package/lib/esm/index.js +12 -9
- package/lib/esm/llm-models/gemini-llm-model.d.ts +26 -0
- package/lib/esm/llm-models/gemini-llm-model.js +191 -0
- package/lib/esm/llm-models/openai-llm-model.d.ts +25 -0
- package/lib/esm/llm-models/openai-llm-model.js +113 -0
- package/lib/esm/runtime.d.ts +55 -0
- package/lib/esm/runtime.js +141 -0
- package/lib/esm/utils/index.d.ts +1 -0
- package/lib/esm/utils/index.js +1 -0
- package/lib/esm/utils/partial.d.ts +3 -0
- package/lib/esm/utils/partial.js +1 -0
- package/package.json +7 -1
package/lib/cjs/index.d.ts
CHANGED
@@ -1,18 +1,21 @@
-export * from "./
+export * from "./agent";
 export * from "./constants";
+export * from "./context";
 export * from "./definitions/data-type";
 export * from "./definitions/data-type-schema";
-export * from "./definitions/open-api";
 export * from "./definitions/memory";
-export * from "./
-export * from "./runnable";
-export * from "./agent";
-export * from "./pipeline-agent";
-export * from "./llm-agent";
-export * from "./llm-model";
+export * from "./definitions/open-api";
 export * from "./function-agent";
 export * from "./function-runner";
+export * from "./llm-agent";
 export * from "./llm-decision-agent";
+export * from "./llm-model";
+export * from "./llm-models/gemini-llm-model";
+export * from "./llm-models/openai-llm-model";
 export * from "./local-function-agent";
-export * from "./open-api-agent";
 export * from "./memorable";
+export * from "./open-api-agent";
+export * from "./pipeline-agent";
+export * from "./runnable";
+export * from "./runtime";
+export * from "./utils";
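The export list is now sorted and exposes the new runtime, context, utils, and LLM-model modules from the package root. For orientation, a consumer can therefore import the new classes directly; a minimal sketch (only the identifiers are taken from the exports above, the surrounding usage is illustrative):

```ts
// Identifiers below are re-exported by the new index.d.ts shown above;
// how they are combined later is an assumption for illustration only.
import { Runtime, GeminiLLMModel, OpenaiLLMModel } from "@aigne/core";
```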
package/lib/cjs/index.js
CHANGED
@@ -14,21 +14,24 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-__exportStar(require("./
+__exportStar(require("./agent"), exports);
 __exportStar(require("./constants"), exports);
+__exportStar(require("./context"), exports);
 __exportStar(require("./definitions/data-type"), exports);
 __exportStar(require("./definitions/data-type-schema"), exports);
-__exportStar(require("./definitions/open-api"), exports);
 __exportStar(require("./definitions/memory"), exports);
-__exportStar(require("./
-__exportStar(require("./runnable"), exports);
-__exportStar(require("./agent"), exports);
-__exportStar(require("./pipeline-agent"), exports);
-__exportStar(require("./llm-agent"), exports);
-__exportStar(require("./llm-model"), exports);
+__exportStar(require("./definitions/open-api"), exports);
 __exportStar(require("./function-agent"), exports);
 __exportStar(require("./function-runner"), exports);
+__exportStar(require("./llm-agent"), exports);
 __exportStar(require("./llm-decision-agent"), exports);
+__exportStar(require("./llm-model"), exports);
+__exportStar(require("./llm-models/gemini-llm-model"), exports);
+__exportStar(require("./llm-models/openai-llm-model"), exports);
 __exportStar(require("./local-function-agent"), exports);
-__exportStar(require("./open-api-agent"), exports);
 __exportStar(require("./memorable"), exports);
+__exportStar(require("./open-api-agent"), exports);
+__exportStar(require("./pipeline-agent"), exports);
+__exportStar(require("./runnable"), exports);
+__exportStar(require("./runtime"), exports);
+__exportStar(require("./utils"), exports);
package/lib/cjs/llm-models/gemini-llm-model.d.ts
ADDED
@@ -0,0 +1,26 @@
+import { LLMModel, type LLMModelInputs } from "../llm-model";
+export declare class GeminiLLMModel extends LLMModel {
+    config: {
+        apiKey: string;
+        model: string;
+    };
+    constructor(config: {
+        apiKey: string;
+        model: string;
+    });
+    private client;
+    private model;
+    process(input: LLMModelInputs): AsyncGenerator<{
+        $text: string;
+        delta: {
+            toolCalls: {
+                id?: string;
+                type?: "function";
+                function?: {
+                    name?: string;
+                    arguments?: string;
+                };
+            }[];
+        };
+    }, void, unknown>;
+}
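Per the declaration above, the Gemini adapter is configured with just an API key and a model id. A minimal instantiation sketch (the model id is a placeholder, not something this diff prescribes):

```ts
import { GeminiLLMModel } from "@aigne/core";

// Sketch: constructor fields match the .d.ts above; the model id is a placeholder.
const gemini = new GeminiLLMModel({
  apiKey: process.env.GEMINI_API_KEY ?? "",
  model: "gemini-1.5-flash",
});
```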
package/lib/cjs/llm-models/gemini-llm-model.js
ADDED
@@ -0,0 +1,195 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.GeminiLLMModel = void 0;
+const generative_ai_1 = require("@google/generative-ai");
+const nanoid_1 = require("nanoid");
+const llm_model_1 = require("../llm-model");
+const is_non_nullable_1 = require("../utils/is-non-nullable");
+class GeminiLLMModel extends llm_model_1.LLMModel {
+    config;
+    constructor(config) {
+        super();
+        this.config = config;
+        this.client = new generative_ai_1.GoogleGenerativeAI(this.config.apiKey);
+        this.model = this.client.getGenerativeModel({ model: this.config.model });
+    }
+    client;
+    model;
+    async *process(input) {
+        const res = await this.model.generateContentStream({
+            contents: await contentsFromInputMessages(input.messages),
+            tools: toolsFromInputTools(input.tools),
+            toolConfig: toolConfigFromInputToolChoice(input.toolChoice),
+            generationConfig: generationConfigFromInput(input),
+        });
+        const toolCalls = [];
+        for await (const chunk of res.stream) {
+            const choice = chunk.candidates?.[0];
+            if (choice?.content.parts) {
+                const calls = choice.content.parts
+                    .filter((i) => typeof i.functionCall === "object")
+                    .map((i) => ({
+                    id: (0, nanoid_1.nanoid)(),
+                    type: "function",
+                    function: {
+                        name: i.functionCall.name,
+                        arguments: JSON.stringify(i.functionCall.args),
+                    },
+                }));
+                if (calls.length) {
+                    toolCalls.push(...calls);
+                }
+                yield {
+                    $text: choice.content.parts
+                        .map((i) => i.text)
+                        .filter(Boolean)
+                        .join(""),
+                    delta: { toolCalls },
+                };
+            }
+            else if (chunk.promptFeedback?.blockReason) {
+                const { blockReason, blockReasonMessage } = chunk.promptFeedback;
+                throw new Error(["PROMPT_BLOCKED", blockReason, blockReasonMessage]
+                    .filter(Boolean)
+                    .join(" "));
+            }
+        }
+    }
+}
+exports.GeminiLLMModel = GeminiLLMModel;
+async function contentsFromInputMessages(messages) {
+    const contents = [];
+    let prevMsg;
+    while (messages.length) {
+        const message = messages.shift();
+        if (!prevMsg || message.role !== prevMsg.role) {
+            prevMsg = {
+                role: message.role === "assistant" ? "model" : "user",
+                parts: [],
+            };
+            contents.push(prevMsg);
+        }
+        if (typeof message.content === "string") {
+            prevMsg.parts.push({ text: message.content });
+        }
+        else if (Array.isArray(message.content)) {
+            const res = await Promise.all(message.content.map(resolveContent));
+            prevMsg.parts.push(...res.filter(is_non_nullable_1.isNonNullable));
+        }
+    }
+    return contents;
+}
+async function resolveContent(content) {
+    if (typeof content === "string")
+        return { text: content };
+    if (content.type === "text" && content.text) {
+        return { text: content.text };
+    }
+    if (content.type === "image_url") {
+        const url = content.imageUrl.url;
+        return { fileData: { mimeType: "image/jpeg", fileUri: url } };
+    }
+    return undefined;
+}
+function parameterSchemaToFunctionDeclarationSchema(schema) {
+    if (schema.type === "object") {
+        return {
+            type: generative_ai_1.SchemaType.OBJECT,
+            description: schema.description,
+            properties: Object.fromEntries(Object.entries(schema.properties).map(([key, s]) => [
+                key,
+                openAISchemaToGeminiSchema(s),
+            ])),
+            required: schema.required,
+        };
+    }
+    throw new Error(`Unsupported schema type ${schema.type}`);
+}
+function generationConfigFromInput(input) {
+    const jsonSchema = input.responseFormat?.type === "json_schema"
+        ? input.responseFormat.jsonSchema
+        : undefined;
+    return {
+        temperature: input.modelOptions?.temperature,
+        topP: input.modelOptions?.topP,
+        frequencyPenalty: input.modelOptions?.frequencyPenalty,
+        presencePenalty: input.modelOptions?.presencePenalty,
+        responseMimeType: jsonSchema ? "application/json" : undefined,
+        responseSchema: jsonSchema
+            ? openAISchemaToGeminiSchema(jsonSchema)
+            : undefined,
+    };
+}
+function toolConfigFromInputToolChoice(toolChoice) {
+    if (!toolChoice)
+        return undefined;
+    const selectedToolFunctionName = typeof toolChoice === "object" ? toolChoice.function.name : undefined;
+    return !toolChoice
+        ? undefined
+        : {
+            functionCallingConfig: {
+                mode: toolChoice === "required" || selectedToolFunctionName
+                    ? generative_ai_1.FunctionCallingMode.ANY
+                    : toolChoice === "none"
+                        ? generative_ai_1.FunctionCallingMode.NONE
+                        : generative_ai_1.FunctionCallingMode.AUTO,
+                allowedFunctionNames: selectedToolFunctionName
+                    ? [selectedToolFunctionName]
+                    : undefined,
+            },
+        };
+}
+function toolsFromInputTools(tools) {
+    return tools?.length
+        ? [
+            {
+                functionDeclarations: tools.map((i) => ({
+                    name: i.function.name,
+                    description: i.function.description,
+                    parameters: !i.function.parameters ||
+                        Object.keys(i.function.parameters).length === 0
+                        ? undefined
+                        : parameterSchemaToFunctionDeclarationSchema(i.function.parameters),
+                })),
+            },
+        ]
+        : undefined;
+}
+function openAISchemaToGeminiSchema(schema) {
+    if (schema.type === "string") {
+        return {
+            type: generative_ai_1.SchemaType.STRING,
+            description: schema.description,
+        };
+    }
+    if (schema.type === "number") {
+        return {
+            type: generative_ai_1.SchemaType.NUMBER,
+            description: schema.description,
+        };
+    }
+    if (schema.type === "boolean") {
+        return {
+            type: generative_ai_1.SchemaType.BOOLEAN,
+            description: schema.description,
+        };
+    }
+    if (schema.type === "object") {
+        return {
+            type: generative_ai_1.SchemaType.OBJECT,
+            description: schema.description,
+            properties: Object.fromEntries(Object.entries(schema.properties).map(([key, s]) => [
+                key,
+                openAISchemaToGeminiSchema(s),
+            ])),
+            required: schema.required,
+        };
+    }
+    if (schema.type === "array") {
+        return {
+            type: generative_ai_1.SchemaType.ARRAY,
+            items: openAISchemaToGeminiSchema(schema.items),
+        };
+    }
+    throw new Error(`Unsupported schema type ${schema.type}`);
+}
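The compiled implementation shows that process() streams Gemini chunks, joining text parts into $text and accumulating tool calls into delta.toolCalls (or throwing on a blocked prompt). A consumption sketch, assuming a plain messages array is a valid LLMModelInputs value; that type lives in ../llm-model and is not part of this diff:

```ts
// Sketch only: the field names mirror what process() reads in the compiled code above.
for await (const chunk of gemini.process({
  messages: [{ role: "user", content: "Say hello" }],
})) {
  process.stdout.write(chunk.$text);
  if (chunk.delta.toolCalls.length) console.log(chunk.delta.toolCalls);
}
```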
package/lib/cjs/llm-models/openai-llm-model.d.ts
ADDED
@@ -0,0 +1,25 @@
+import { LLMModel, type LLMModelInputs } from "../llm-model";
+export declare class OpenaiLLMModel extends LLMModel {
+    config: {
+        apiKey: string;
+        model: string;
+    };
+    constructor(config: {
+        apiKey: string;
+        model: string;
+    });
+    private client;
+    process(input: LLMModelInputs): AsyncGenerator<{
+        $text: string | undefined;
+        delta: {
+            toolCalls: {
+                id?: string;
+                type?: "function";
+                function?: {
+                    name?: string;
+                    arguments?: string;
+                };
+            }[];
+        };
+    }, void, unknown>;
+}
package/lib/cjs/llm-models/openai-llm-model.js
ADDED
@@ -0,0 +1,120 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.OpenaiLLMModel = void 0;
+const nanoid_1 = require("nanoid");
+const openai_1 = __importDefault(require("openai"));
+const llm_model_1 = require("../llm-model");
+const is_non_nullable_1 = require("../utils/is-non-nullable");
+class OpenaiLLMModel extends llm_model_1.LLMModel {
+    config;
+    constructor(config) {
+        super();
+        this.config = config;
+        this.client = new openai_1.default({ apiKey: this.config.apiKey });
+    }
+    client;
+    async *process(input) {
+        const res = await this.client.chat.completions.create({
+            model: this.config.model,
+            temperature: input.modelOptions?.temperature,
+            top_p: input.modelOptions?.topP,
+            frequency_penalty: input.modelOptions?.frequencyPenalty,
+            presence_penalty: input.modelOptions?.presencePenalty,
+            messages: await contentsFromInputMessages(input.messages),
+            tools: toolsFromInputTools(input.tools),
+            tool_choice: input.toolChoice,
+            response_format: input.responseFormat?.type === "json_schema"
+                ? {
+                    type: "json_schema",
+                    json_schema: {
+                        ...input.responseFormat.jsonSchema,
+                        schema: jsonSchemaToOpenAIJsonSchema(input.responseFormat.jsonSchema.schema),
+                    },
+                }
+                : undefined,
+            stream: true,
+        });
+        const toolCalls = [];
+        for await (const chunk of res) {
+            const choice = chunk.choices?.[0];
+            const calls = choice?.delta.tool_calls?.map((i) => ({
+                id: i.id || (0, nanoid_1.nanoid)(),
+                type: "function",
+                function: {
+                    name: i.function?.name,
+                    arguments: i.function?.arguments,
+                },
+            }));
+            if (calls?.length) {
+                toolCalls.push(...calls);
+            }
+            yield {
+                $text: choice?.delta.content || undefined,
+                delta: { toolCalls },
+            };
+        }
+    }
+}
+exports.OpenaiLLMModel = OpenaiLLMModel;
+async function contentsFromInputMessages(messages) {
+    return messages.map((i) => ({
+        role: i.role,
+        content: typeof i.content === "string"
+            ? i.content
+            : i.content
+                .map((c) => {
+                if (c.type === "text") {
+                    return { type: "text", text: c.text };
+                }
+                if (c.type === "image_url") {
+                    return {
+                        type: "image_url",
+                        image_url: { url: c.imageUrl.url },
+                    };
+                }
+            })
+                .filter(is_non_nullable_1.isNonNullable),
+        tool_call_id: i.toolCallId,
+    }));
+}
+function toolsFromInputTools(tools) {
+    return tools?.length
+        ? tools.map((i) => ({
+            type: "function",
+            function: {
+                name: i.function.name,
+                description: i.function.description,
+                parameters: i.function.parameters,
+            },
+        }))
+        : undefined;
+}
+function jsonSchemaToOpenAIJsonSchema(schema) {
+    if (schema?.type === "object") {
+        const { required, properties } = schema;
+        return {
+            ...schema,
+            properties: Object.fromEntries(Object.entries(properties).map(([key, value]) => {
+                const valueSchema = jsonSchemaToOpenAIJsonSchema(value);
+                // NOTE: All fields must be required https://platform.openai.com/docs/guides/structured-outputs/all-fields-must-be-required
+                return [
+                    key,
+                    required?.includes(key)
+                        ? valueSchema
+                        : { anyOf: [valueSchema, { type: ["null"] }] },
+                ];
+            })),
+            required: Object.keys(properties),
+        };
+    }
+    if (schema?.type === "array") {
+        return {
+            ...schema,
+            items: jsonSchemaToOpenAIJsonSchema(schema.items),
+        };
+    }
+    return schema;
+}
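The private jsonSchemaToOpenAIJsonSchema helper adapts a response schema to OpenAI's structured-output rule that every property be listed in required: originally optional properties are instead wrapped in a nullable anyOf. A before/after sketch of that transformation (the input schema is invented for illustration):

```ts
// Illustrative input a caller might pass via responseFormat.jsonSchema.schema:
const input = {
  type: "object",
  properties: { name: { type: "string" }, age: { type: "number" } },
  required: ["name"],
};

// Shape the helper would produce for the OpenAI request:
const normalized = {
  type: "object",
  properties: {
    name: { type: "string" },
    age: { anyOf: [{ type: "number" }, { type: ["null"] }] }, // optional -> nullable
  },
  required: ["name", "age"], // every key listed, per the structured-outputs rule
};
```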
package/lib/cjs/runtime.d.ts
ADDED
@@ -0,0 +1,55 @@
+import { type DependencyContainer } from "tsyringe";
+import type { constructor as Constructor } from "tsyringe/dist/typings/types";
+import type { Context, ContextState } from "./context";
+import type { FunctionRunner } from "./function-runner";
+import type { LLMModel, LLMModelConfiguration } from "./llm-model";
+import { Runnable, type RunnableDefinition } from "./runnable";
+import { OrderedRecord } from "./utils/ordered-map";
+import type { DeepPartial } from "./utils/partial";
+export interface RuntimeConfiguration {
+    llmModel?: LLMModelConfiguration;
+}
+export interface RuntimeOptions<Agents extends {
+    [name: string]: Runnable;
+}, State extends ContextState> {
+    id?: string;
+    name?: string;
+    config?: RuntimeConfiguration;
+    state?: State;
+    agents?: Agents;
+    llmModel?: LLMModel | Constructor<LLMModel>;
+    functionRunner?: FunctionRunner | Constructor<FunctionRunner>;
+}
+export declare class Runtime<Agents extends {
+    [name: string]: Runnable;
+} = {}, State extends ContextState = ContextState> implements Context<State> {
+    constructor(options?: RuntimeOptions<Agents, State>);
+    protected inner: RuntimeInner<Agents, State>;
+    get options(): RuntimeOptions<Agents, State>;
+    get id(): string;
+    get name(): string | undefined;
+    config: RuntimeConfiguration;
+    state: State;
+    agents: Agents;
+    private container;
+    setup(config: DeepPartial<RuntimeConfiguration>): void;
+    register<R extends Array<RunnableDefinition | Runnable> = []>(...runnables: R): void;
+    private resolveSync;
+    resolve<T extends Runnable>(id: string | RunnableDefinition | T): Promise<T>;
+    resolveDependency<T>(token: string | symbol): T;
+    copy<State extends ContextState = ContextState>(options: Pick<RuntimeOptions<Agents, State>, "state" | "config">): Runtime<Agents, State>;
+}
+declare class RuntimeInner<Agents extends {
+    [name: string]: Runnable;
+} = {}, State extends ContextState = ContextState> {
+    options: RuntimeOptions<Agents, State>;
+    constructor(options: RuntimeOptions<Agents, State>);
+    readonly id: string;
+    readonly name?: string;
+    config: RuntimeConfiguration;
+    state: State;
+    container: DependencyContainer;
+    runnableDefinitions: OrderedRecord<RunnableDefinition>;
+    registerDependency<T>(token: string | symbol, dependency: Constructor<T> | T): void;
+}
+export {};
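RuntimeOptions ties the new pieces together: a Runtime can be constructed with an LLM model (instance or class) plus optional shared config and state. A minimal sketch based on the declarations above; the state value and model id are placeholders:

```ts
import { Runtime, OpenaiLLMModel } from "@aigne/core";

// Sketch: option names come from RuntimeOptions above; the values are placeholders.
const runtime = new Runtime({
  name: "example-runtime",
  llmModel: new OpenaiLLMModel({
    apiKey: process.env.OPENAI_API_KEY ?? "",
    model: "gpt-4o-mini",
  }),
  state: { userId: "demo-user" },
});
```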
package/lib/cjs/runtime.js
ADDED
@@ -0,0 +1,144 @@
+"use strict";
+var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
+    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
+    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
+    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
+    return c > 3 && r && Object.defineProperty(target, key, r), r;
+};
+var __metadata = (this && this.__metadata) || function (k, v) {
+    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
+};
+var Runtime_1;
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Runtime = void 0;
+const immer_1 = require("immer");
+const lodash_1 = require("lodash");
+const nanoid_1 = require("nanoid");
+const tsyringe_1 = require("tsyringe");
+const constants_1 = require("./constants");
+const function_agent_1 = require("./function-agent");
+const llm_agent_1 = require("./llm-agent");
+const llm_decision_agent_1 = require("./llm-decision-agent");
+const local_function_agent_1 = require("./local-function-agent");
+const open_api_agent_1 = require("./open-api-agent");
+const pipeline_agent_1 = require("./pipeline-agent");
+const runnable_1 = require("./runnable");
+const ordered_map_1 = require("./utils/ordered-map");
+let Runtime = Runtime_1 = class Runtime {
+    constructor(options = {}) {
+        // support copy inner from a existing runtime, but not expose to public
+        const inner = options.inner;
+        if (inner instanceof RuntimeInner)
+            this.inner = inner;
+        else
+            this.inner = new RuntimeInner(options);
+        this.container = this.inner.container.createChildContainer();
+        this.container.register(constants_1.TYPES.context, { useValue: this });
+        this.config = options.config || { ...this.inner.config };
+        this.state = options.state || { ...this.inner.state };
+    }
+    inner;
+    get options() {
+        return this.inner.options;
+    }
+    get id() {
+        return this.inner.id;
+    }
+    get name() {
+        return this.inner.name;
+    }
+    config;
+    state;
+    agents = new Proxy({}, { get: (_, prop) => this.resolveSync(prop.toString()) });
+    container;
+    setup(config) {
+        this.config = (0, immer_1.produce)(this.config, (draft) => {
+            (0, lodash_1.merge)(draft, config);
+        });
+    }
+    register(...runnables) {
+        for (const runnable of runnables) {
+            ordered_map_1.OrderedRecord.pushOrUpdate(this.inner.runnableDefinitions, runnable instanceof runnable_1.Runnable ? runnable.definition : runnable);
+        }
+    }
+    resolveSync(idOrRunnable) {
+        const runnableId = typeof idOrRunnable === "string" ? idOrRunnable : idOrRunnable.id;
+        // Find runnable definition by id or name
+        let definition = this.inner.runnableDefinitions[runnableId] ??
+            ordered_map_1.OrderedRecord.find(this.inner.runnableDefinitions, (def) => def.name === runnableId);
+        if (!definition) {
+            // extract definition from runnable
+            if (idOrRunnable instanceof runnable_1.Runnable)
+                definition = idOrRunnable.definition;
+            // directly use runnable as definition
+            else if (typeof idOrRunnable === "object")
+                definition = idOrRunnable;
+        }
+        if (definition) {
+            const childContainer = this.container
+                .createChildContainer()
+                .register(constants_1.TYPES.definition, { useValue: definition });
+            const result = childContainer.resolve(definition.type);
+            childContainer.dispose();
+            return result;
+        }
+        throw new Error(`Runnable not found: ${idOrRunnable}`);
+    }
+    async resolve(id) {
+        return this.resolveSync(id);
+    }
+    resolveDependency(token) {
+        return this.container.resolve(token);
+    }
+    copy(options) {
+        const clone = new Runtime_1({
+            ...options,
+            // Copy inner runtime
+            ...{ inner: this.inner },
+        });
+        return clone;
+    }
+};
+exports.Runtime = Runtime;
+exports.Runtime = Runtime = Runtime_1 = __decorate([
+    (0, tsyringe_1.injectable)(),
+    __metadata("design:paramtypes", [Object])
+], Runtime);
+class RuntimeInner {
+    options;
+    constructor(options) {
+        this.options = options;
+        this.name = options.name;
+        this.id = options.id || this.name || (0, nanoid_1.nanoid)();
+        this.config = options.config || {};
+        this.state = options.state || {};
+        this.container.register("pipeline_agent", { useClass: pipeline_agent_1.PipelineAgent });
+        this.container.register("llm_agent", { useClass: llm_agent_1.LLMAgent });
+        this.container.register("function_agent", { useClass: function_agent_1.FunctionAgent });
+        this.container.register("llm_decision_agent", {
+            useClass: llm_decision_agent_1.LLMDecisionAgent,
+        });
+        this.container.register("local_function_agent", {
+            useClass: local_function_agent_1.LocalFunctionAgent,
+        });
+        this.container.register("open_api_agent", { useClass: open_api_agent_1.OpenAPIAgent });
+        if (options.functionRunner)
+            this.registerDependency(constants_1.TYPES.functionRunner, options.functionRunner);
+        if (options.llmModel)
+            this.registerDependency(constants_1.TYPES.llmModel, options.llmModel);
+    }
+    id;
+    name;
+    config;
+    state;
+    container = tsyringe_1.container.createChildContainer();
+    runnableDefinitions = ordered_map_1.OrderedRecord.fromArray([]);
+    registerDependency(token, dependency) {
+        if (typeof dependency === "function")
+            this.container.register(token, {
+                useClass: dependency,
+            });
+        else
+            this.container.register(token, { useValue: dependency });
+    }
+}
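Note that copy() reuses the shared RuntimeInner (registered agent classes, DI container, id) while the clone gets its own config and state, which makes it suitable for request-scoped contexts. A short sketch continuing the hypothetical runtime from the previous example:

```ts
// Sketch: derive a request-scoped context that shares registered agents and the
// DI container but carries its own state.
const requestRuntime = runtime.copy({ state: { userId: "another-user" } });
console.log(requestRuntime.id === runtime.id); // true: the id lives on the shared inner
```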
package/lib/cjs/utils/index.d.ts
CHANGED
package/lib/cjs/utils/index.js
CHANGED
@@ -22,5 +22,6 @@ __exportStar(require("./nullable"), exports);
 __exportStar(require("./omit"), exports);
 __exportStar(require("./open-api-parameter"), exports);
 __exportStar(require("./ordered-map"), exports);
+__exportStar(require("./partial"), exports);
 __exportStar(require("./stream-utils"), exports);
 __exportStar(require("./union"), exports);