ai-world-sdk 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1006 -0
- package/dist/__tests__/example.test.d.ts +5 -0
- package/dist/__tests__/example.test.js +533 -0
- package/dist/base.d.ts +96 -0
- package/dist/base.js +181 -0
- package/dist/chat_models/anthropic.d.ts +11 -0
- package/dist/chat_models/anthropic.js +17 -0
- package/dist/chat_models/google.d.ts +11 -0
- package/dist/chat_models/google.js +17 -0
- package/dist/chat_models/openai.d.ts +11 -0
- package/dist/chat_models/openai.js +17 -0
- package/dist/config.d.ts +46 -0
- package/dist/config.js +67 -0
- package/dist/image_generation.d.ts +38 -0
- package/dist/image_generation.js +70 -0
- package/dist/index.d.ts +27 -0
- package/dist/index.js +77 -0
- package/dist/messages.d.ts +71 -0
- package/dist/messages.js +73 -0
- package/dist/video_generation.d.ts +91 -0
- package/dist/video_generation.js +88 -0
- package/package.json +56 -0
package/dist/base.js
ADDED
@@ -0,0 +1,181 @@
"use strict";
/**
 * Base chat model class
 * Similar to LangChain.js BaseChatModel
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.BaseChatModel = void 0;
const messages_1 = require("./messages");
const config_1 = require("./config");
class BaseChatModel {
    constructor(config) {
        // Use the provided baseUrl, otherwise fall back to the global configuration
        const baseUrl = config.baseUrl || config_1.sdkConfig.getBaseUrl();
        if (!baseUrl) {
            throw new Error("baseUrl is required. Either provide it in config or set it globally using sdkConfig.setBaseUrl()");
        }
        this.baseUrl = baseUrl.replace(/\/$/, "");
        // Merge global headers with the per-instance headers
        const globalHeaders = config_1.sdkConfig.getHeaders();
        this.headers = {
            "Content-Type": "application/json",
            ...globalHeaders,
            ...config.headers,
        };
        // Use the provided token, otherwise fall back to the global token
        const token = config.token || config_1.sdkConfig.getToken();
        if (token) {
            this.headers["Authorization"] = `Bearer ${token}`;
        }
        // If a provider is given, add it to the request headers
        if (config.provider) {
            this.headers["X-Provider"] = config.provider;
            this.provider = config.provider;
        }
        // If an apiKey is given, add it to the request headers (names the environment variable the backend should use)
        if (config.apiKey) {
            this.headers["X-Api-Key-Env"] = config.apiKey;
            this.apiKey = config.apiKey;
        }
        this.temperature = config.temperature ?? 0.7;
        this.maxTokens = config.maxTokens;
        this.topP = config.topP;
        this.modelName = config.modelName;
    }
    /**
     * Invoke the model with messages (non-streaming)
     * Follows the standard LangChain API
     */
    async invoke(messages) {
        const options = this._mergeOptions({});
        const response = await fetch(`${this.baseUrl}/api/langchain-proxy/invoke`, {
            method: "POST",
            headers: this.headers,
            body: JSON.stringify({
                messages: messages.map((msg) => msg.toJSON()),
                config: {
                    temperature: options.temperature ?? this.temperature,
                    max_tokens: options.maxTokens ?? this.maxTokens,
                    top_p: options.topP ?? this.topP,
                    tools: options.tools,
                    tool_choice: options.toolChoice,
                },
                model: this.modelName,
                provider: this.provider,
                api_key_env: this.apiKey,
            }),
        });
        if (!response.ok) {
            const errorText = await response.text();
            throw new Error(`Langchain API error: ${response.status} ${errorText}`);
        }
        // The backend returns the standard AIMessage format (serialized via message_to_dict)
        const data = (await response.json());
        // Create an AIMessage object from the standard AIMessage format
        const content = data.content || "";
        return new messages_1.AIMessage(content);
    }
    /**
     * Stream the model with messages
     * Follows the standard LangChain API and yields AIMessageChunk
     */
    async *stream(messages) {
        const options = this._mergeOptions({});
        const requestBody = {
            messages: messages.map((msg) => msg.toJSON()),
            config: {
                temperature: options.temperature ?? this.temperature,
                max_tokens: options.maxTokens ?? this.maxTokens,
                top_p: options.topP ?? this.topP,
                tools: options.tools,
                tool_choice: options.toolChoice,
            },
            model: this.modelName,
            provider: this.provider,
            api_key_env: this.apiKey,
        };
        const response = await fetch(`${this.baseUrl}/api/langchain-proxy/stream`, {
            method: "POST",
            headers: {
                ...this.headers,
                Accept: "text/event-stream",
            },
            body: JSON.stringify(requestBody),
        });
        if (!response.ok) {
            const errorText = await response.text();
            throw new Error(`Langchain API error: ${response.status} ${errorText}`);
        }
        const decoder = new TextDecoder();
        for await (const chunk of response.body?.values() || []) {
            try {
                yield new messages_1.AIMessageChunk(JSON.parse(decoder.decode(chunk)));
            }
            catch {
                // Ignore parse errors
            }
        }
    }
    /**
     * Batch invoke multiple message sets
     * Follows the standard LangChain API
     */
    async batch(messagesList) {
        const response = await fetch(`${this.baseUrl}/api/langchain-proxy/batch`, {
            method: "POST",
            headers: this.headers,
            body: JSON.stringify({
                messages_list: messagesList.map((msgs) => msgs.map((msg) => msg.toJSON())),
                config: {
                    temperature: this.temperature,
                    max_tokens: this.maxTokens,
                    top_p: this.topP,
                },
                model: this.modelName,
                provider: this.provider,
                api_key_env: this.apiKey,
            }),
        });
        if (!response.ok) {
            const errorText = await response.text();
            throw new Error(`Langchain API error: ${response.status} ${errorText}`);
        }
        // The backend returns a list in the standard AIMessage format (serialized via message_to_dict)
        const results = (await response.json());
        return results.map((result) => new messages_1.AIMessage(result.content || ""));
    }
    /**
     * Bind options to the model (temperature, maxTokens, tools, etc.)
     * Returns a new model instance with bound options
     * Similar to LangChain.js bind method
     */
    bind(options) {
        const newModel = Object.create(Object.getPrototypeOf(this));
        Object.assign(newModel, this);
        newModel.boundOptions = { ...this.boundOptions, ...options };
        return newModel;
    }
    /**
     * Bind tools to the model
     * Similar to LangChain.js bindTools method
     */
    bindTools(tools) {
        return this.bind({ tools });
    }
    /**
     * Merge bound options with provided options
     */
    _mergeOptions(options) {
        return {
            ...this.boundOptions,
            ...options,
        };
    }
    /**
     * Get the current model name
     */
    getModelName() {
        return this.modelName;
    }
}
exports.BaseChatModel = BaseChatModel;
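A minimal usage sketch of the bind()/bindTools() mechanics above, exercised through a concrete subclass (ChatOpenAI, defined later in this diff). The base URL, model id, and tool schema are illustrative placeholders; BaseChatModel simply forwards tools to the proxy backend, so the OpenAI-style tool shape is an assumption, not something this diff confirms.

import { ChatOpenAI, HumanMessage } from "ai-world-sdk";

async function main() {
  const model = new ChatOpenAI({
    modelName: "gpt-4o-mini",             // placeholder model id
    baseUrl: "https://proxy.example.com", // placeholder proxy URL
  });

  // bind() copies the instance and merges the options into every subsequent request
  const deterministic = model.bind({ temperature: 0, maxTokens: 256 });

  // bindTools() is shorthand for bind({ tools }); the tool shape below is an assumption
  const withTools = deterministic.bindTools([
    {
      type: "function",
      function: {
        name: "get_weather",              // hypothetical tool
        description: "Look up the weather for a city",
        parameters: { type: "object", properties: { city: { type: "string" } } },
      },
    },
  ]);

  const reply = await withTools.invoke([new HumanMessage("What's the weather in Paris?")]);
  console.log(reply.content);
}

main().catch(console.error);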
package/dist/chat_models/anthropic.d.ts
ADDED
@@ -0,0 +1,11 @@
/**
 * ChatAnthropic - Anthropic Claude chat model
 * Similar to LangChain.js ChatAnthropic
 */
import { BaseChatModel, BaseChatModelParams } from "../base";
export interface ChatAnthropicConfig extends BaseChatModelParams {
    modelName: string;
}
export declare class ChatAnthropic extends BaseChatModel {
    constructor(config: ChatAnthropicConfig);
}
package/dist/chat_models/anthropic.js
ADDED
@@ -0,0 +1,17 @@
"use strict";
/**
 * ChatAnthropic - Anthropic Claude chat model
 * Similar to LangChain.js ChatAnthropic
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChatAnthropic = void 0;
const base_1 = require("../base");
class ChatAnthropic extends base_1.BaseChatModel {
    constructor(config) {
        super({
            ...config,
            modelName: config.modelName || "claude-3-sonnet-20240229",
        });
    }
}
exports.ChatAnthropic = ChatAnthropic;
package/dist/chat_models/google.d.ts
ADDED
@@ -0,0 +1,11 @@
/**
 * ChatGoogleGenerativeAI - Google Gemini chat model
 * Similar to LangChain.js ChatGoogleGenerativeAI
 */
import { BaseChatModel, BaseChatModelParams } from "../base";
export interface ChatGoogleGenerativeAIConfig extends BaseChatModelParams {
    modelName: string;
}
export declare class ChatGoogleGenerativeAI extends BaseChatModel {
    constructor(config: ChatGoogleGenerativeAIConfig);
}
package/dist/chat_models/google.js
ADDED
@@ -0,0 +1,17 @@
"use strict";
/**
 * ChatGoogleGenerativeAI - Google Gemini chat model
 * Similar to LangChain.js ChatGoogleGenerativeAI
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChatGoogleGenerativeAI = void 0;
const base_1 = require("../base");
class ChatGoogleGenerativeAI extends base_1.BaseChatModel {
    constructor(config) {
        super({
            ...config,
            modelName: config.modelName || "gemini-1.5-pro",
        });
    }
}
exports.ChatGoogleGenerativeAI = ChatGoogleGenerativeAI;
package/dist/chat_models/openai.d.ts
ADDED
@@ -0,0 +1,11 @@
/**
 * ChatOpenAI - OpenAI chat model
 * Similar to LangChain.js ChatOpenAI
 */
import { BaseChatModel, BaseChatModelParams } from "../base";
export interface ChatOpenAIConfig extends BaseChatModelParams {
    modelName: string;
}
export declare class ChatOpenAI extends BaseChatModel {
    constructor(config: ChatOpenAIConfig);
}
package/dist/chat_models/openai.js
ADDED
@@ -0,0 +1,17 @@
"use strict";
/**
 * ChatOpenAI - OpenAI chat model
 * Similar to LangChain.js ChatOpenAI
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChatOpenAI = void 0;
const base_1 = require("../base");
class ChatOpenAI extends base_1.BaseChatModel {
    constructor(config) {
        super({
            ...config,
            modelName: config.modelName || "gpt-3.5-turbo",
        });
    }
}
exports.ChatOpenAI = ChatOpenAI;
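A minimal sketch of calling the ChatOpenAI wrapper above through the proxy endpoints defined in BaseChatModel. The base URL is a placeholder, and the prompts are illustrative; invoke() resolves to a single AIMessage while stream() is an async generator of AIMessageChunk.

import { ChatOpenAI, HumanMessage, SystemMessage } from "ai-world-sdk";

async function main() {
  const chat = new ChatOpenAI({
    modelName: "gpt-3.5-turbo",           // same as the class default
    baseUrl: "https://proxy.example.com", // placeholder proxy URL
  });

  // Non-streaming call
  const reply = await chat.invoke([
    new SystemMessage("You are a terse assistant."),
    new HumanMessage("Name three prime numbers."),
  ]);
  console.log(reply.content);

  // Streaming call: consume chunks as they arrive
  for await (const chunk of chat.stream([new HumanMessage("Count to five.")])) {
    if (typeof chunk.content === "string") process.stdout.write(chunk.content);
  }
}

main().catch(console.error);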
package/dist/config.d.ts
ADDED
@@ -0,0 +1,46 @@
/**
 * Global SDK Configuration
 */
declare class SDKConfig {
    private _baseUrl;
    private _token;
    private _headers;
    /**
     * Set global base URL
     */
    setBaseUrl(baseUrl: string): void;
    /**
     * Get global base URL
     */
    getBaseUrl(): string | null;
    /**
     * Set global token
     */
    setToken(token: string): void;
    /**
     * Get global token
     */
    getToken(): string | null;
    /**
     * Set global headers
     */
    setHeaders(headers: Record<string, string>): void;
    /**
     * Get global headers
     */
    getHeaders(): Record<string, string>;
    /**
     * Reset all global configuration
     */
    reset(): void;
}
export declare const sdkConfig: SDKConfig;
export {};
package/dist/config.js
ADDED
@@ -0,0 +1,67 @@
"use strict";
/**
 * Global SDK Configuration
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.sdkConfig = void 0;
class SDKConfig {
    constructor() {
        this._baseUrl = null;
        this._token = null;
        this._headers = {};
    }
    /**
     * Set global base URL
     */
    setBaseUrl(baseUrl) {
        this._baseUrl = baseUrl.replace(/\/$/, "");
    }
    /**
     * Get global base URL
     */
    getBaseUrl() {
        return this._baseUrl;
    }
    /**
     * Set global token
     */
    setToken(token) {
        this._token = token;
    }
    /**
     * Get global token
     */
    getToken() {
        return this._token;
    }
    /**
     * Set global headers
     */
    setHeaders(headers) {
        this._headers = { ...this._headers, ...headers };
    }
    /**
     * Get global headers
     */
    getHeaders() {
        return { ...this._headers };
    }
    /**
     * Reset all global configuration
     */
    reset() {
        this._baseUrl = null;
        this._token = null;
        this._headers = {};
    }
}
// Export a singleton instance
exports.sdkConfig = new SDKConfig();
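A minimal sketch of configuring the sdkConfig singleton above once and then constructing clients without per-instance baseUrl/token. All values are placeholders.

import { sdkConfig, ChatOpenAI } from "ai-world-sdk";

sdkConfig.setBaseUrl("https://proxy.example.com/");  // trailing slash is stripped
sdkConfig.setToken("my-session-token");              // sent as Authorization: Bearer <token>
sdkConfig.setHeaders({ "X-Request-Source": "docs-example" });

// No baseUrl or token here: BaseChatModel falls back to the global configuration
const chat = new ChatOpenAI({ modelName: "gpt-3.5-turbo" });

// sdkConfig.reset() clears the global baseUrl, token, and headers again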
package/dist/image_generation.d.ts
ADDED
@@ -0,0 +1,38 @@
/**
 * Image Generation Client
 */
export interface ImageGenerationConfig {
    baseUrl?: string;
    token?: string;
    headers?: Record<string, string>;
}
export interface ImageGenerationRequest {
    model?: string;
    prompt: string;
    size?: string;
    watermark?: boolean;
    n?: number;
    quality?: "standard" | "hd";
    response_format?: "url" | "b64_json";
    style?: "vivid" | "natural";
    user?: string;
}
export interface ImageData {
    url?: string;
    b64_json?: string;
}
export interface ImageGenerationResponse {
    created: number;
    data: ImageData[];
}
export declare class ImageGenerationClient {
    private baseUrl;
    private headers;
    constructor(config?: ImageGenerationConfig);
    /**
     * Generate images
     */
    generate(request: ImageGenerationRequest): Promise<ImageGenerationResponse>;
}
package/dist/image_generation.js
ADDED
@@ -0,0 +1,70 @@
"use strict";
/**
 * Image Generation Client
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.ImageGenerationClient = void 0;
const config_1 = require("./config");
class ImageGenerationClient {
    constructor(config) {
        // Use the provided baseUrl, otherwise fall back to the global configuration
        const baseUrl = config?.baseUrl || config_1.sdkConfig.getBaseUrl();
        if (!baseUrl) {
            throw new Error("baseUrl is required. Either provide it in config or set it globally using sdkConfig.setBaseUrl()");
        }
        this.baseUrl = baseUrl.replace(/\/$/, "");
        // Merge global headers with the per-instance headers
        const globalHeaders = config_1.sdkConfig.getHeaders();
        this.headers = {
            "Content-Type": "application/json",
            ...globalHeaders,
            ...config?.headers,
        };
        // Use the provided token, otherwise fall back to the global token
        const token = config?.token || config_1.sdkConfig.getToken();
        if (token) {
            this.headers["Authorization"] = `Bearer ${token}`;
        }
    }
    /**
     * Generate images
     */
    async generate(request) {
        const requestBody = {
            model: request.model || "doubao-seedream-4-5-251128",
            prompt: request.prompt,
            size: request.size || "2K",
            n: request.n || 1,
        };
        // Add optional parameters
        if (request.watermark !== undefined) {
            requestBody.watermark = request.watermark;
        }
        if (request.quality) {
            requestBody.quality = request.quality;
        }
        if (request.response_format) {
            requestBody.response_format = request.response_format;
        }
        if (request.style) {
            requestBody.style = request.style;
        }
        if (request.user) {
            requestBody.user = request.user;
        }
        const response = await fetch(`${this.baseUrl}/api/image-proxy/generate`, {
            method: "POST",
            headers: this.headers,
            body: JSON.stringify(requestBody),
        });
        if (!response.ok) {
            const errorText = await response.text();
            throw new Error(`Image generation API error: ${response.status} ${errorText}`);
        }
        const data = (await response.json());
        return data;
    }
}
exports.ImageGenerationClient = ImageGenerationClient;
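A minimal sketch of calling the ImageGenerationClient above. The base URL and prompt are placeholders; model and size fall back to the defaults shown in generate().

import { ImageGenerationClient } from "ai-world-sdk";

async function main() {
  const images = new ImageGenerationClient({ baseUrl: "https://proxy.example.com" });
  const result = await images.generate({
    prompt: "A watercolor fox in a snowy forest",
    n: 1,
    response_format: "url",
  });
  // result.data is an array of { url?, b64_json? } entries
  for (const item of result.data) console.log(item.url ?? "(base64 image)");
}

main().catch(console.error);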
package/dist/index.d.ts
ADDED
@@ -0,0 +1,27 @@
/**
 * AI World SDK
 * TypeScript SDK for AI World Platform
 * Includes Langchain-compatible chat models and image generation
 * @see https://github.com/langchain-ai/langchainjs
 */
import { BaseChatModel, BaseChatModelParams } from "./base";
import { ImageGenerationClient, type ImageGenerationConfig, type ImageGenerationRequest, type ImageGenerationResponse } from "./image_generation";
import { VideoGenerationClient, type VideoGenerationConfig, type VideoGenerationRequest, type ContentGenerationTaskID, type ContentGenerationTask } from "./video_generation";
import { sdkConfig } from "./config";
export { BaseMessage, HumanMessage, AIMessage, SystemMessage, AIMessageChunk, type MessageContent, type AIMessageChunkData, } from "./messages";
export { BaseChatModel, type BaseChatModelParams, type ToolDefinition, type BindOptions, } from "./base";
export { ChatOpenAI } from "./chat_models/openai";
export { ChatGoogleGenerativeAI } from "./chat_models/google";
export { ChatAnthropic } from "./chat_models/anthropic";
export interface LangchainClientConfig {
    baseUrl?: string;
    token?: string;
    headers?: Record<string, string>;
}
export { ImageGenerationClient, type ImageGenerationConfig, type ImageGenerationRequest, type ImageGenerationResponse, };
export { VideoGenerationClient, type VideoGenerationConfig, type VideoGenerationRequest, type ContentGenerationTaskID, type ContentGenerationTask, };
export { sdkConfig };
/**
 * Create a chat model instance based on model name
 */
export declare function createChatModel(model: string, config: LangchainClientConfig & BaseChatModelParams): BaseChatModel;
package/dist/index.js
ADDED
@@ -0,0 +1,77 @@
"use strict";
/**
 * AI World SDK
 * TypeScript SDK for AI World Platform
 * Includes Langchain-compatible chat models and image generation
 * @see https://github.com/langchain-ai/langchainjs
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.sdkConfig = exports.VideoGenerationClient = exports.ImageGenerationClient = exports.ChatAnthropic = exports.ChatGoogleGenerativeAI = exports.ChatOpenAI = exports.BaseChatModel = exports.AIMessageChunk = exports.SystemMessage = exports.AIMessage = exports.HumanMessage = void 0;
exports.createChatModel = createChatModel;
const openai_1 = require("./chat_models/openai");
const google_1 = require("./chat_models/google");
const anthropic_1 = require("./chat_models/anthropic");
const image_generation_1 = require("./image_generation");
Object.defineProperty(exports, "ImageGenerationClient", { enumerable: true, get: function () { return image_generation_1.ImageGenerationClient; } });
const video_generation_1 = require("./video_generation");
Object.defineProperty(exports, "VideoGenerationClient", { enumerable: true, get: function () { return video_generation_1.VideoGenerationClient; } });
const config_1 = require("./config");
Object.defineProperty(exports, "sdkConfig", { enumerable: true, get: function () { return config_1.sdkConfig; } });
// Re-export types and classes
var messages_1 = require("./messages");
Object.defineProperty(exports, "HumanMessage", { enumerable: true, get: function () { return messages_1.HumanMessage; } });
Object.defineProperty(exports, "AIMessage", { enumerable: true, get: function () { return messages_1.AIMessage; } });
Object.defineProperty(exports, "SystemMessage", { enumerable: true, get: function () { return messages_1.SystemMessage; } });
Object.defineProperty(exports, "AIMessageChunk", { enumerable: true, get: function () { return messages_1.AIMessageChunk; } });
var base_1 = require("./base");
Object.defineProperty(exports, "BaseChatModel", { enumerable: true, get: function () { return base_1.BaseChatModel; } });
var openai_2 = require("./chat_models/openai");
Object.defineProperty(exports, "ChatOpenAI", { enumerable: true, get: function () { return openai_2.ChatOpenAI; } });
var google_2 = require("./chat_models/google");
Object.defineProperty(exports, "ChatGoogleGenerativeAI", { enumerable: true, get: function () { return google_2.ChatGoogleGenerativeAI; } });
var anthropic_2 = require("./chat_models/anthropic");
Object.defineProperty(exports, "ChatAnthropic", { enumerable: true, get: function () { return anthropic_2.ChatAnthropic; } });
/**
 * Create a chat model instance based on model name
 */
function createChatModel(model, config) {
    // If no baseUrl is provided, fall back to the global configuration
    const finalConfig = {
        ...config,
        baseUrl: config.baseUrl || config_1.sdkConfig.getBaseUrl() || undefined,
        token: config.token || config_1.sdkConfig.getToken() || undefined,
        headers: {
            ...config_1.sdkConfig.getHeaders(),
            ...config.headers,
        },
    };
    const modelLower = model.toLowerCase();
    if (modelLower.startsWith("gpt") || modelLower.startsWith("o1")) {
        return new openai_1.ChatOpenAI({
            modelName: model,
            ...finalConfig,
        });
    }
    else if (modelLower.startsWith("doubao-")) {
        // Doubao is OpenAI-compatible, so reuse ChatOpenAI
        return new openai_1.ChatOpenAI({
            modelName: model,
            ...finalConfig,
        });
    }
    else if (modelLower.startsWith("gemini")) {
        return new google_1.ChatGoogleGenerativeAI({
            modelName: model,
            ...finalConfig,
        });
    }
    else if (modelLower.startsWith("claude")) {
        return new anthropic_1.ChatAnthropic({
            modelName: model,
            ...finalConfig,
        });
    }
    else {
        throw new Error(`Unsupported model: ${model}. Supported models: gpt-*, o1-*, doubao-*, gemini-*, claude-*`);
    }
}
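A minimal sketch of the prefix-based routing in createChatModel above. The model ids and base URL are illustrative placeholders, and the empty second argument assumes the remaining BaseChatModelParams fields are optional, which this diff does not show (base.d.ts is not listed in full here).

import { createChatModel, sdkConfig } from "ai-world-sdk";

sdkConfig.setBaseUrl("https://proxy.example.com"); // placeholder proxy URL

const gpt = createChatModel("gpt-4o-mini", {});        // "gpt*"    -> ChatOpenAI
const doubao = createChatModel("doubao-pro-32k", {});  // "doubao-" -> ChatOpenAI (OpenAI-compatible)
const gemini = createChatModel("gemini-1.5-pro", {});  // "gemini*" -> ChatGoogleGenerativeAI
const claude = createChatModel("claude-3-haiku", {});  // "claude*" -> ChatAnthropic

console.log(gpt.getModelName(), doubao.getModelName(), gemini.getModelName(), claude.getModelName());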
package/dist/messages.d.ts
ADDED
@@ -0,0 +1,71 @@
/**
 * Message types for Langchain SDK
 * Similar to LangChain.js message interface
 */
export interface MessageContent {
    type: "text" | "image_url";
    text?: string;
    image_url?: string;
}
export interface BaseMessage {
    content: string | MessageContent[];
    role?: "user" | "assistant" | "system";
    toJSON(): {
        role: string;
        content: string | MessageContent[];
    };
}
export declare class HumanMessage implements BaseMessage {
    content: string | MessageContent[];
    role: "user";
    constructor(content: string | MessageContent[]);
    toJSON(): {
        role: string;
        content: string | MessageContent[];
    };
}
export declare class AIMessage implements BaseMessage {
    content: string | MessageContent[];
    role: "assistant";
    constructor(content: string | MessageContent[]);
    toJSON(): {
        role: string;
        content: string | MessageContent[];
    };
}
export declare class SystemMessage implements BaseMessage {
    content: string | MessageContent[];
    role: "system";
    constructor(content: string | MessageContent[]);
    toJSON(): {
        role: string;
        content: string | MessageContent[];
    };
}
export interface AIMessageChunkData {
    content?: string | MessageContent[];
    type?: "AIMessageChunk" | "ai";
    id?: string;
    tool_call_chunks?: any[];
    chunk_position?: "last" | null;
    response_metadata?: Record<string, any>;
    usage_metadata?: Record<string, any>;
    additional_kwargs?: Record<string, any>;
    [key: string]: any;
}
export declare class AIMessageChunk implements BaseMessage {
    content: string | MessageContent[];
    role: "assistant";
    type: "AIMessageChunk";
    id?: string;
    tool_call_chunks?: any[];
    chunk_position?: "last" | null;
    response_metadata?: Record<string, any>;
    usage_metadata?: Record<string, any>;
    additional_kwargs?: Record<string, any>;
    constructor(data: AIMessageChunkData | string | MessageContent[]);
    toJSON(): {
        role: string;
        content: string | MessageContent[];
    };
}
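A minimal sketch of constructing the message types declared above, including a multimodal HumanMessage; the image URL and prompts are placeholders.

import { HumanMessage, SystemMessage, AIMessage } from "ai-world-sdk";

const system = new SystemMessage("You describe images briefly.");
const question = new HumanMessage([
  { type: "text", text: "What is in this picture?" },
  { type: "image_url", image_url: "https://example.com/cat.png" }, // placeholder URL
]);

// toJSON() produces the { role, content } shape that BaseChatModel sends to the proxy
console.log(system.toJSON());   // { role: "system", content: "You describe images briefly." }
console.log(question.toJSON()); // { role: "user", content: [...] }

// AIMessage is what invoke()/batch() return; it can also be constructed directly, e.g. for few-shot history
const priorTurn = new AIMessage("A tabby cat on a windowsill.");
console.log(priorTurn.toJSON());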