plugin-custom-llm 1.2.0 → 1.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/externalVersion.js +9 -7
- package/dist/server/llm-providers/custom-llm.js +264 -7
- package/dist/swagger.js +39 -0
- package/package.json +9 -1
- package/src/client/client.d.ts +249 -0
- package/src/client/index.tsx +19 -0
- package/src/client/llm-providers/custom-llm/ModelSettings.tsx +139 -0
- package/src/client/llm-providers/custom-llm/ProviderSettings.tsx +115 -0
- package/src/client/llm-providers/custom-llm/index.ts +10 -0
- package/src/client/locale.ts +8 -0
- package/src/client/models/index.ts +12 -0
- package/src/client/plugin.tsx +10 -0
- package/src/index.ts +2 -0
- package/src/locale/en-US.json +29 -0
- package/src/locale/vi-VN.json +29 -0
- package/src/locale/zh-CN.json +16 -0
- package/src/server/collections/.gitkeep +0 -0
- package/src/server/index.ts +1 -0
- package/src/server/llm-providers/custom-llm.ts +992 -0
- package/src/server/plugin.ts +27 -0
- package/src/swagger.ts +9 -0
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
import React from 'react';
|
|
2
|
+
import { SchemaComponent } from '@nocobase/client';
|
|
3
|
+
import { tval } from '@nocobase/utils/client';
|
|
4
|
+
import { namespace, useT } from '../../locale';
|
|
5
|
+
import { Collapse } from 'antd';
|
|
6
|
+
import { ModelSelect } from '@nocobase/plugin-ai/client';
|
|
7
|
+
|
|
8
|
+
const Options: React.FC = () => {
|
|
9
|
+
const t = useT();
|
|
10
|
+
return (
|
|
11
|
+
<div style={{ marginBottom: 24 }}>
|
|
12
|
+
<Collapse
|
|
13
|
+
bordered={false}
|
|
14
|
+
size="small"
|
|
15
|
+
items={[
|
|
16
|
+
{
|
|
17
|
+
key: 'options',
|
|
18
|
+
label: t('Options'),
|
|
19
|
+
forceRender: true,
|
|
20
|
+
children: (
|
|
21
|
+
<SchemaComponent
|
|
22
|
+
schema={{
|
|
23
|
+
type: 'void',
|
|
24
|
+
name: 'custom-llm',
|
|
25
|
+
properties: {
|
|
26
|
+
temperature: {
|
|
27
|
+
title: tval('Temperature', { ns: namespace }),
|
|
28
|
+
type: 'number',
|
|
29
|
+
'x-decorator': 'FormItem',
|
|
30
|
+
'x-component': 'InputNumber',
|
|
31
|
+
default: 0.7,
|
|
32
|
+
'x-component-props': {
|
|
33
|
+
step: 0.1,
|
|
34
|
+
min: 0.0,
|
|
35
|
+
max: 2.0,
|
|
36
|
+
},
|
|
37
|
+
},
|
|
38
|
+
maxCompletionTokens: {
|
|
39
|
+
title: tval('Max completion tokens', { ns: namespace }),
|
|
40
|
+
type: 'number',
|
|
41
|
+
'x-decorator': 'FormItem',
|
|
42
|
+
'x-component': 'InputNumber',
|
|
43
|
+
default: -1,
|
|
44
|
+
},
|
|
45
|
+
topP: {
|
|
46
|
+
title: tval('Top P', { ns: namespace }),
|
|
47
|
+
type: 'number',
|
|
48
|
+
'x-decorator': 'FormItem',
|
|
49
|
+
'x-component': 'InputNumber',
|
|
50
|
+
default: 1.0,
|
|
51
|
+
'x-component-props': {
|
|
52
|
+
step: 0.1,
|
|
53
|
+
min: 0.0,
|
|
54
|
+
max: 1.0,
|
|
55
|
+
},
|
|
56
|
+
},
|
|
57
|
+
frequencyPenalty: {
|
|
58
|
+
title: tval('Frequency penalty', { ns: namespace }),
|
|
59
|
+
type: 'number',
|
|
60
|
+
'x-decorator': 'FormItem',
|
|
61
|
+
'x-component': 'InputNumber',
|
|
62
|
+
default: 0.0,
|
|
63
|
+
'x-component-props': {
|
|
64
|
+
step: 0.1,
|
|
65
|
+
min: -2.0,
|
|
66
|
+
max: 2.0,
|
|
67
|
+
},
|
|
68
|
+
},
|
|
69
|
+
presencePenalty: {
|
|
70
|
+
title: tval('Presence penalty', { ns: namespace }),
|
|
71
|
+
type: 'number',
|
|
72
|
+
'x-decorator': 'FormItem',
|
|
73
|
+
'x-component': 'InputNumber',
|
|
74
|
+
default: 0.0,
|
|
75
|
+
'x-component-props': {
|
|
76
|
+
step: 0.1,
|
|
77
|
+
min: -2.0,
|
|
78
|
+
max: 2.0,
|
|
79
|
+
},
|
|
80
|
+
},
|
|
81
|
+
responseFormat: {
|
|
82
|
+
title: tval('Response format', { ns: namespace }),
|
|
83
|
+
type: 'string',
|
|
84
|
+
'x-decorator': 'FormItem',
|
|
85
|
+
'x-component': 'Select',
|
|
86
|
+
enum: [
|
|
87
|
+
{ label: t('Text'), value: 'text' },
|
|
88
|
+
{ label: t('JSON'), value: 'json_object' },
|
|
89
|
+
],
|
|
90
|
+
default: 'text',
|
|
91
|
+
},
|
|
92
|
+
timeout: {
|
|
93
|
+
title: tval('Timeout (ms)', { ns: namespace }),
|
|
94
|
+
type: 'number',
|
|
95
|
+
'x-decorator': 'FormItem',
|
|
96
|
+
'x-component': 'InputNumber',
|
|
97
|
+
default: 60000,
|
|
98
|
+
},
|
|
99
|
+
maxRetries: {
|
|
100
|
+
title: tval('Max retries', { ns: namespace }),
|
|
101
|
+
type: 'number',
|
|
102
|
+
'x-decorator': 'FormItem',
|
|
103
|
+
'x-component': 'InputNumber',
|
|
104
|
+
default: 1,
|
|
105
|
+
},
|
|
106
|
+
},
|
|
107
|
+
}}
|
|
108
|
+
/>
|
|
109
|
+
),
|
|
110
|
+
},
|
|
111
|
+
]}
|
|
112
|
+
/>
|
|
113
|
+
</div>
|
|
114
|
+
);
|
|
115
|
+
};
|
|
116
|
+
|
|
117
|
+
export const ModelSettingsForm: React.FC = () => {
|
|
118
|
+
return (
|
|
119
|
+
<SchemaComponent
|
|
120
|
+
components={{ Options, ModelSelect }}
|
|
121
|
+
schema={{
|
|
122
|
+
type: 'void',
|
|
123
|
+
properties: {
|
|
124
|
+
model: {
|
|
125
|
+
title: tval('Model', { ns: namespace }),
|
|
126
|
+
type: 'string',
|
|
127
|
+
required: true,
|
|
128
|
+
'x-decorator': 'FormItem',
|
|
129
|
+
'x-component': 'ModelSelect',
|
|
130
|
+
},
|
|
131
|
+
options: {
|
|
132
|
+
type: 'void',
|
|
133
|
+
'x-component': 'Options',
|
|
134
|
+
},
|
|
135
|
+
},
|
|
136
|
+
}}
|
|
137
|
+
/>
|
|
138
|
+
);
|
|
139
|
+
};
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
import React from 'react';
|
|
2
|
+
import { SchemaComponent } from '@nocobase/client';
|
|
3
|
+
import { tval } from '@nocobase/utils/client';
|
|
4
|
+
import { namespace } from '../../locale';
|
|
5
|
+
|
|
6
|
+
export const ProviderSettingsForm: React.FC = () => {
|
|
7
|
+
return (
|
|
8
|
+
<SchemaComponent
|
|
9
|
+
schema={{
|
|
10
|
+
type: 'void',
|
|
11
|
+
properties: {
|
|
12
|
+
apiKey: {
|
|
13
|
+
title: tval('API Key', { ns: namespace }),
|
|
14
|
+
type: 'string',
|
|
15
|
+
required: true,
|
|
16
|
+
'x-decorator': 'FormItem',
|
|
17
|
+
'x-component': 'TextAreaWithGlobalScope',
|
|
18
|
+
},
|
|
19
|
+
disableStream: {
|
|
20
|
+
title: tval('Disable streaming', { ns: namespace }),
|
|
21
|
+
type: 'boolean',
|
|
22
|
+
'x-decorator': 'FormItem',
|
|
23
|
+
'x-component': 'Checkbox',
|
|
24
|
+
'x-content': tval('Disable streaming description', { ns: namespace }),
|
|
25
|
+
},
|
|
26
|
+
streamKeepAlive: {
|
|
27
|
+
title: tval('Stream keepalive', { ns: namespace }),
|
|
28
|
+
type: 'boolean',
|
|
29
|
+
'x-decorator': 'FormItem',
|
|
30
|
+
'x-component': 'Checkbox',
|
|
31
|
+
'x-content': tval('Stream keepalive description', { ns: namespace }),
|
|
32
|
+
},
|
|
33
|
+
keepAliveIntervalMs: {
|
|
34
|
+
title: tval('Keepalive interval (ms)', { ns: namespace }),
|
|
35
|
+
type: 'number',
|
|
36
|
+
'x-decorator': 'FormItem',
|
|
37
|
+
'x-component': 'InputNumber',
|
|
38
|
+
'x-component-props': {
|
|
39
|
+
placeholder: '5000',
|
|
40
|
+
min: 1000,
|
|
41
|
+
step: 1000,
|
|
42
|
+
style: { width: '100%' },
|
|
43
|
+
},
|
|
44
|
+
description: tval('Keepalive interval description', { ns: namespace }),
|
|
45
|
+
},
|
|
46
|
+
keepAliveContent: {
|
|
47
|
+
title: tval('Keepalive content', { ns: namespace }),
|
|
48
|
+
type: 'string',
|
|
49
|
+
'x-decorator': 'FormItem',
|
|
50
|
+
'x-component': 'Input',
|
|
51
|
+
'x-component-props': {
|
|
52
|
+
placeholder: '...',
|
|
53
|
+
},
|
|
54
|
+
description: tval('Keepalive content description', { ns: namespace }),
|
|
55
|
+
},
|
|
56
|
+
timeout: {
|
|
57
|
+
title: tval('Timeout (ms)', { ns: namespace }),
|
|
58
|
+
type: 'number',
|
|
59
|
+
'x-decorator': 'FormItem',
|
|
60
|
+
'x-component': 'InputNumber',
|
|
61
|
+
'x-component-props': {
|
|
62
|
+
placeholder: '120000',
|
|
63
|
+
min: 0,
|
|
64
|
+
step: 1000,
|
|
65
|
+
style: { width: '100%' },
|
|
66
|
+
},
|
|
67
|
+
description: tval('Timeout description', { ns: namespace }),
|
|
68
|
+
},
|
|
69
|
+
requestConfig: {
|
|
70
|
+
title: tval('Request config (JSON)', { ns: namespace }),
|
|
71
|
+
type: 'string',
|
|
72
|
+
'x-decorator': 'FormItem',
|
|
73
|
+
'x-component': 'Input.TextArea',
|
|
74
|
+
'x-component-props': {
|
|
75
|
+
placeholder: JSON.stringify(
|
|
76
|
+
{
|
|
77
|
+
extraHeaders: {},
|
|
78
|
+
extraBody: {},
|
|
79
|
+
modelKwargs: {},
|
|
80
|
+
},
|
|
81
|
+
null,
|
|
82
|
+
2,
|
|
83
|
+
),
|
|
84
|
+
rows: 6,
|
|
85
|
+
style: { fontFamily: 'monospace', fontSize: 12 },
|
|
86
|
+
},
|
|
87
|
+
description: tval('Request config description', { ns: namespace }),
|
|
88
|
+
},
|
|
89
|
+
responseConfig: {
|
|
90
|
+
title: tval('Response config (JSON)', { ns: namespace }),
|
|
91
|
+
type: 'string',
|
|
92
|
+
'x-decorator': 'FormItem',
|
|
93
|
+
'x-component': 'Input.TextArea',
|
|
94
|
+
'x-component-props': {
|
|
95
|
+
placeholder: JSON.stringify(
|
|
96
|
+
{
|
|
97
|
+
contentPath: 'auto',
|
|
98
|
+
reasoningKey: 'reasoning_content',
|
|
99
|
+
responseMapping: {
|
|
100
|
+
content: 'message.response',
|
|
101
|
+
},
|
|
102
|
+
},
|
|
103
|
+
null,
|
|
104
|
+
2,
|
|
105
|
+
),
|
|
106
|
+
rows: 8,
|
|
107
|
+
style: { fontFamily: 'monospace', fontSize: 12 },
|
|
108
|
+
},
|
|
109
|
+
description: tval('Response config description', { ns: namespace }),
|
|
110
|
+
},
|
|
111
|
+
},
|
|
112
|
+
}}
|
|
113
|
+
/>
|
|
114
|
+
);
|
|
115
|
+
};
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { LLMProviderOptions } from '@nocobase/plugin-ai/client';
|
|
2
|
+
import { ProviderSettingsForm } from './ProviderSettings';
|
|
3
|
+
import { ModelSettingsForm } from './ModelSettings';
|
|
4
|
+
|
|
5
|
+
export const customLLMProviderOptions: LLMProviderOptions = {
|
|
6
|
+
components: {
|
|
7
|
+
ProviderSettingsForm,
|
|
8
|
+
ModelSettingsForm,
|
|
9
|
+
},
|
|
10
|
+
};
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* This file is part of the NocoBase (R) project.
|
|
3
|
+
* Copyright (c) 2020-2024 NocoBase Co., Ltd.
|
|
4
|
+
* Authors: NocoBase Team.
|
|
5
|
+
*
|
|
6
|
+
* This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
|
|
7
|
+
* For more information, please refer to: https://www.nocobase.com/agreement.
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import { ModelConstructor } from '@nocobase/flow-engine';
|
|
11
|
+
|
|
12
|
+
export default {} as Record<string, ModelConstructor>;
|
package/src/locale/en-US.json
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
{
|
|
2
|
+
"Base URL": "Base URL",
|
|
3
|
+
"API Key": "API Key",
|
|
4
|
+
"Model": "Model",
|
|
5
|
+
"Options": "Options",
|
|
6
|
+
"Temperature": "Temperature",
|
|
7
|
+
"Max completion tokens": "Max completion tokens",
|
|
8
|
+
"Top P": "Top P",
|
|
9
|
+
"Frequency penalty": "Frequency penalty",
|
|
10
|
+
"Presence penalty": "Presence penalty",
|
|
11
|
+
"Response format": "Response format",
|
|
12
|
+
"Text": "Text",
|
|
13
|
+
"JSON": "JSON",
|
|
14
|
+
"Timeout (ms)": "Timeout (ms)",
|
|
15
|
+
"Timeout description": "Request timeout in milliseconds. Increase this for models with long thinking/processing phases. Default: 120000 (2 minutes).",
|
|
16
|
+
"Max retries": "Max retries",
|
|
17
|
+
"Disable streaming": "Disable streaming",
|
|
18
|
+
"Disable streaming description": "Use non-streaming mode. Enable this for models that have a long \"thinking\" phase before responding, which can cause empty stream values and processing to terminate early.",
|
|
19
|
+
"Stream keepalive": "Stream keepalive",
|
|
20
|
+
"Stream keepalive description": "Keep stream alive during model thinking. Injects placeholder content when no data arrives within the keepalive interval. Works only when streaming is enabled.",
|
|
21
|
+
"Keepalive interval (ms)": "Keepalive interval (ms)",
|
|
22
|
+
"Keepalive interval description": "Interval in milliseconds between keepalive signals. Default: 5000 (5 seconds).",
|
|
23
|
+
"Keepalive content": "Keepalive content",
|
|
24
|
+
"Keepalive content description": "Placeholder text used as keepalive signal (invisible to the user). Default: '...'",
|
|
25
|
+
"Request config (JSON)": "Request config (JSON)",
|
|
26
|
+
"Request config description": "Extra configuration for LLM requests. Supported keys: extraHeaders (custom HTTP headers), extraBody (extra request body fields), modelKwargs (LangChain model kwargs).",
|
|
27
|
+
"Response config (JSON)": "Response config (JSON)",
|
|
28
|
+
"Response config description": "Configure response parsing. contentPath: 'auto' or dot-path. reasoningKey: key for reasoning content. responseMapping: { content: 'dot.path' } — maps non-standard LLM response to OpenAI format (e.g., 'message.response')."
|
|
29
|
+
}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
{
|
|
2
|
+
"Base URL": "URL cơ sở",
|
|
3
|
+
"API Key": "API Key",
|
|
4
|
+
"Model": "Mô hình",
|
|
5
|
+
"Options": "Tùy chọn",
|
|
6
|
+
"Temperature": "Nhiệt độ",
|
|
7
|
+
"Max completion tokens": "Số token tối đa",
|
|
8
|
+
"Top P": "Top P",
|
|
9
|
+
"Frequency penalty": "Hình phạt tần suất",
|
|
10
|
+
"Presence penalty": "Hình phạt sự hiện diện",
|
|
11
|
+
"Response format": "Định dạng phản hồi",
|
|
12
|
+
"Text": "Văn bản",
|
|
13
|
+
"JSON": "JSON",
|
|
14
|
+
"Timeout (ms)": "Thời gian chờ (ms)",
|
|
15
|
+
"Timeout description": "Thời gian chờ request tính bằng mili giây. Tăng giá trị này cho các model có giai đoạn thinking/xử lý dài. Mặc định: 120000 (2 phút).",
|
|
16
|
+
"Max retries": "Số lần thử lại tối đa",
|
|
17
|
+
"Disable streaming": "Tắt streaming",
|
|
18
|
+
"Disable streaming description": "Sử dụng chế độ non-streaming. Bật tính năng này cho các model có giai đoạn \"thinking\" dài trước khi trả lời, gây ra stream rỗng và xử lý bị ngắt sớm.",
|
|
19
|
+
"Stream keepalive": "Giữ kết nối stream",
|
|
20
|
+
"Stream keepalive description": "Giữ stream hoạt động khi model đang thinking. Gửi nội dung giữ kết nối khi không có dữ liệu trong khoảng thời gian đã cấu hình. Chỉ hoạt động khi streaming được bật.",
|
|
21
|
+
"Keepalive interval (ms)": "Khoảng thời gian keepalive (ms)",
|
|
22
|
+
"Keepalive interval description": "Khoảng thời gian giữa các tín hiệu keepalive, tính bằng mili giây. Mặc định: 5000 (5 giây).",
|
|
23
|
+
"Keepalive content": "Nội dung keepalive",
|
|
24
|
+
"Keepalive content description": "Nội dung giữ kết nối (không hiển thị cho người dùng). Mặc định: '...'",
|
|
25
|
+
"Request config (JSON)": "Cấu hình request (JSON)",
|
|
26
|
+
"Request config description": "Cấu hình bổ sung cho request LLM. Các key hỗ trợ: extraHeaders (HTTP headers tùy chỉnh), extraBody (thêm trường vào request body), modelKwargs (tham số model LangChain).",
|
|
27
|
+
"Response config (JSON)": "Cấu hình response (JSON)",
|
|
28
|
+
"Response config description": "Cấu hình parse response. contentPath: 'auto' hoặc dot-path. reasoningKey: key reasoning. responseMapping: { content: 'dot.path' } — mapping response không chuẩn OpenAI (ví dụ: 'message.response')."
|
|
29
|
+
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
{
|
|
2
|
+
"Base URL": "基础 URL",
|
|
3
|
+
"API Key": "API 密钥",
|
|
4
|
+
"Model": "模型",
|
|
5
|
+
"Options": "选项",
|
|
6
|
+
"Temperature": "温度",
|
|
7
|
+
"Max completion tokens": "最大完成令牌数",
|
|
8
|
+
"Top P": "Top P",
|
|
9
|
+
"Frequency penalty": "频率惩罚",
|
|
10
|
+
"Presence penalty": "存在惩罚",
|
|
11
|
+
"Response format": "响应格式",
|
|
12
|
+
"Text": "文本",
|
|
13
|
+
"JSON": "JSON",
|
|
14
|
+
"Timeout (ms)": "超时 (毫秒)",
|
|
15
|
+
"Max retries": "最大重试次数"
|
|
16
|
+
}
|
|
File without changes
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { default } from './plugin';
|