@aigne/core 1.0.6 → 1.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/cjs/llm-models/gemini-llm-model.d.ts +2 -4
- package/lib/cjs/llm-models/gemini-llm-model.js +4 -0
- package/lib/cjs/llm-models/openai-llm-model.d.ts +2 -4
- package/lib/cjs/llm-models/openai-llm-model.js +3 -0
- package/lib/dts/llm-models/gemini-llm-model.d.ts +2 -4
- package/lib/dts/llm-models/openai-llm-model.d.ts +2 -4
- package/lib/esm/llm-models/gemini-llm-model.d.ts +2 -4
- package/lib/esm/llm-models/gemini-llm-model.js +4 -0
- package/lib/esm/llm-models/openai-llm-model.d.ts +2 -4
- package/lib/esm/llm-models/openai-llm-model.js +3 -0
- package/package.json +4 -4
|
@@ -1,15 +1,13 @@
|
|
|
1
1
|
import { LLMModel, type LLMModelInputs } from "../llm-model";
|
|
2
2
|
export declare class GeminiLLMModel extends LLMModel {
|
|
3
|
-
config: {
|
|
4
|
-
apiKey: string;
|
|
5
|
-
model: string;
|
|
6
|
-
};
|
|
3
|
+
private config;
|
|
7
4
|
constructor(config: {
|
|
8
5
|
apiKey: string;
|
|
9
6
|
model: string;
|
|
10
7
|
});
|
|
11
8
|
private client;
|
|
12
9
|
private model;
|
|
10
|
+
setApiKey(apiKey: string): void;
|
|
13
11
|
process(input: LLMModelInputs): AsyncGenerator<{
|
|
14
12
|
$text: string;
|
|
15
13
|
delta: {
|
|
@@ -15,6 +15,10 @@ class GeminiLLMModel extends llm_model_1.LLMModel {
|
|
|
15
15
|
}
|
|
16
16
|
client;
|
|
17
17
|
model;
|
|
18
|
+
setApiKey(apiKey) {
|
|
19
|
+
this.client = new generative_ai_1.GoogleGenerativeAI(apiKey);
|
|
20
|
+
this.model = this.client.getGenerativeModel({ model: this.config.model });
|
|
21
|
+
}
|
|
18
22
|
async *process(input) {
|
|
19
23
|
const res = await this.model.generateContentStream({
|
|
20
24
|
contents: await contentsFromInputMessages(input.messages),
|
|
@@ -1,14 +1,12 @@
|
|
|
1
1
|
import { LLMModel, type LLMModelInputs } from "../llm-model";
|
|
2
2
|
export declare class OpenaiLLMModel extends LLMModel {
|
|
3
|
-
config: {
|
|
4
|
-
apiKey: string;
|
|
5
|
-
model: string;
|
|
6
|
-
};
|
|
3
|
+
private config;
|
|
7
4
|
constructor(config: {
|
|
8
5
|
apiKey: string;
|
|
9
6
|
model: string;
|
|
10
7
|
});
|
|
11
8
|
private client;
|
|
9
|
+
setApiKey(apiKey: string): void;
|
|
12
10
|
process(input: LLMModelInputs): AsyncGenerator<{
|
|
13
11
|
$text: string | undefined;
|
|
14
12
|
delta: {
|
|
@@ -16,6 +16,9 @@ class OpenaiLLMModel extends llm_model_1.LLMModel {
|
|
|
16
16
|
this.client = new openai_1.default({ apiKey: this.config.apiKey });
|
|
17
17
|
}
|
|
18
18
|
client;
|
|
19
|
+
setApiKey(apiKey) {
|
|
20
|
+
this.client = new openai_1.default({ apiKey });
|
|
21
|
+
}
|
|
19
22
|
async *process(input) {
|
|
20
23
|
const res = await this.client.chat.completions.create({
|
|
21
24
|
model: this.config.model,
|
|
@@ -1,15 +1,13 @@
|
|
|
1
1
|
import { LLMModel, type LLMModelInputs } from "../llm-model";
|
|
2
2
|
export declare class GeminiLLMModel extends LLMModel {
|
|
3
|
-
config: {
|
|
4
|
-
apiKey: string;
|
|
5
|
-
model: string;
|
|
6
|
-
};
|
|
3
|
+
private config;
|
|
7
4
|
constructor(config: {
|
|
8
5
|
apiKey: string;
|
|
9
6
|
model: string;
|
|
10
7
|
});
|
|
11
8
|
private client;
|
|
12
9
|
private model;
|
|
10
|
+
setApiKey(apiKey: string): void;
|
|
13
11
|
process(input: LLMModelInputs): AsyncGenerator<{
|
|
14
12
|
$text: string;
|
|
15
13
|
delta: {
|
|
@@ -1,14 +1,12 @@
|
|
|
1
1
|
import { LLMModel, type LLMModelInputs } from "../llm-model";
|
|
2
2
|
export declare class OpenaiLLMModel extends LLMModel {
|
|
3
|
-
config: {
|
|
4
|
-
apiKey: string;
|
|
5
|
-
model: string;
|
|
6
|
-
};
|
|
3
|
+
private config;
|
|
7
4
|
constructor(config: {
|
|
8
5
|
apiKey: string;
|
|
9
6
|
model: string;
|
|
10
7
|
});
|
|
11
8
|
private client;
|
|
9
|
+
setApiKey(apiKey: string): void;
|
|
12
10
|
process(input: LLMModelInputs): AsyncGenerator<{
|
|
13
11
|
$text: string | undefined;
|
|
14
12
|
delta: {
|
|
@@ -1,15 +1,13 @@
|
|
|
1
1
|
import { LLMModel, type LLMModelInputs } from "../llm-model";
|
|
2
2
|
export declare class GeminiLLMModel extends LLMModel {
|
|
3
|
-
config: {
|
|
4
|
-
apiKey: string;
|
|
5
|
-
model: string;
|
|
6
|
-
};
|
|
3
|
+
private config;
|
|
7
4
|
constructor(config: {
|
|
8
5
|
apiKey: string;
|
|
9
6
|
model: string;
|
|
10
7
|
});
|
|
11
8
|
private client;
|
|
12
9
|
private model;
|
|
10
|
+
setApiKey(apiKey: string): void;
|
|
13
11
|
process(input: LLMModelInputs): AsyncGenerator<{
|
|
14
12
|
$text: string;
|
|
15
13
|
delta: {
|
|
@@ -12,6 +12,10 @@ export class GeminiLLMModel extends LLMModel {
|
|
|
12
12
|
}
|
|
13
13
|
client;
|
|
14
14
|
model;
|
|
15
|
+
setApiKey(apiKey) {
|
|
16
|
+
this.client = new GoogleGenerativeAI(apiKey);
|
|
17
|
+
this.model = this.client.getGenerativeModel({ model: this.config.model });
|
|
18
|
+
}
|
|
15
19
|
async *process(input) {
|
|
16
20
|
const res = await this.model.generateContentStream({
|
|
17
21
|
contents: await contentsFromInputMessages(input.messages),
|
|
@@ -1,14 +1,12 @@
|
|
|
1
1
|
import { LLMModel, type LLMModelInputs } from "../llm-model";
|
|
2
2
|
export declare class OpenaiLLMModel extends LLMModel {
|
|
3
|
-
config: {
|
|
4
|
-
apiKey: string;
|
|
5
|
-
model: string;
|
|
6
|
-
};
|
|
3
|
+
private config;
|
|
7
4
|
constructor(config: {
|
|
8
5
|
apiKey: string;
|
|
9
6
|
model: string;
|
|
10
7
|
});
|
|
11
8
|
private client;
|
|
9
|
+
setApiKey(apiKey: string): void;
|
|
12
10
|
process(input: LLMModelInputs): AsyncGenerator<{
|
|
13
11
|
$text: string | undefined;
|
|
14
12
|
delta: {
|
|
@@ -10,6 +10,9 @@ export class OpenaiLLMModel extends LLMModel {
|
|
|
10
10
|
this.client = new OpenAI({ apiKey: this.config.apiKey });
|
|
11
11
|
}
|
|
12
12
|
client;
|
|
13
|
+
setApiKey(apiKey) {
|
|
14
|
+
this.client = new OpenAI({ apiKey });
|
|
15
|
+
}
|
|
13
16
|
async *process(input) {
|
|
14
17
|
const res = await this.client.chat.completions.create({
|
|
15
18
|
model: this.config.model,
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@aigne/core",
|
|
3
|
-
"version": "1.0.6",
|
|
3
|
+
"version": "1.0.7",
|
|
4
4
|
"description": "AIGNE core library",
|
|
5
5
|
"publishConfig": {
|
|
6
6
|
"access": "public"
|
|
@@ -29,20 +29,20 @@
|
|
|
29
29
|
"mustache": "^4.2.0",
|
|
30
30
|
"nanoid": "^5.0.9",
|
|
31
31
|
"tsyringe": "^4.8.0",
|
|
32
|
-
"ufo": "^1.5.4"
|
|
32
|
+
"ufo": "^1.5.4",
|
|
33
|
+
"openai": "^4.79.1",
|
|
34
|
+
"@google/generative-ai": "^0.21.0"
|
|
33
35
|
},
|
|
34
36
|
"peerDependencies": {
|
|
35
37
|
"@google/generative-ai": "^0.21.0",
|
|
36
38
|
"openai": "^4.79.1"
|
|
37
39
|
},
|
|
38
40
|
"devDependencies": {
|
|
39
|
-
"@google/generative-ai": "^0.21.0",
|
|
40
41
|
"@tsconfig/recommended": "^1.0.8",
|
|
41
42
|
"@types/bun": "^1.1.17",
|
|
42
43
|
"@types/lodash": "^4.17.14",
|
|
43
44
|
"core-js": "^3.40.0",
|
|
44
45
|
"npm-run-all": "^4.1.5",
|
|
45
|
-
"openai": "^4.79.1",
|
|
46
46
|
"reflect-metadata": "^0.2.2",
|
|
47
47
|
"rimraf": "^6.0.1",
|
|
48
48
|
"typescript": "^5.7.3"
|