plugin-custom-llm 1.0.0 → 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1 +1,77 @@
1
- # @nocobase/plugin-custom-llm
1
+ # Plugin Custom LLM (OpenAI Compatible)
2
+
3
+ NocoBase plugin for integrating external LLM providers that support OpenAI-compatible `/chat/completions` API, with built-in response format normalization and response mapping for non-standard APIs.
4
+
5
+ ## Features
6
+
7
+ - **OpenAI-compatible**: Works with any LLM provider exposing `/chat/completions` endpoint
8
+ - **Auto content detection**: Handles both string and array content blocks (`[{type: 'text', text: '...'}]`)
9
+ - **Response mapping**: Transform non-standard API responses to OpenAI format via JSON config (supports streaming SSE and JSON)
10
+ - **Reasoning content**: Display thinking/reasoning from DeepSeek-compatible providers
11
+ - **Configurable**: JSON config editors for request and response customization
12
+ - **Locale support**: English, Vietnamese, Chinese
13
+
14
+ ## Installation
15
+
16
+ Upload `plugin-custom-llm-x.x.x.tgz` via NocoBase Plugin Manager UI, then enable.
17
+
18
+ ## Configuration
19
+
20
+ ### Provider Settings
21
+
22
+ | Field | Description |
23
+ |---|---|
24
+ | **Base URL** | LLM endpoint URL, e.g. `https://your-llm-server.com/v1` |
25
+ | **API Key** | Authentication key |
26
+ | **Request config (JSON)** | Optional. Extra request configuration |
27
+ | **Response config (JSON)** | Optional. Response parsing and mapping configuration |
28
+
29
+ ### Request Config
30
+
31
+ ```json
32
+ {
33
+ "extraHeaders": { "X-Custom-Header": "value" },
34
+ "extraBody": { "custom_field": "value" },
35
+ "modelKwargs": { "stop": ["\n"] }
36
+ }
37
+ ```
38
+
39
+ - `extraHeaders` — Custom HTTP headers sent with every request
40
+ - `extraBody` — Additional fields merged into the request body
41
+ - `modelKwargs` — Extra LangChain model parameters (stop sequences, etc.)
42
+
43
+ ### Response Config
44
+
45
+ ```json
46
+ {
47
+ "contentPath": "auto",
48
+ "reasoningKey": "reasoning_content",
49
+ "responseMapping": {
50
+ "content": "message.response"
51
+ }
52
+ }
53
+ ```
54
+
55
+ - `contentPath` — How to extract text from LangChain chunks. `"auto"` (default) detects string, array, and object formats. Or use a dot-path like `"0.text"`
56
+ - `reasoningKey` — Key name for reasoning/thinking content in `additional_kwargs` (default: `"reasoning_content"`)
57
+ - `responseMapping` — Maps non-standard LLM responses to OpenAI format before LangChain processes them:
58
+ - `content` — Dot-path to the content field in the raw response (e.g. `"message.response"`, `"data.text"`)
59
+ - `role` — Dot-path to role field (optional, defaults to `"assistant"`)
60
+ - `id` — Dot-path to response ID (optional)
61
+
62
+ ### Response Mapping Examples
63
+
64
+ | Raw LLM Response | `responseMapping.content` |
65
+ |---|---|
66
+ | `{"message": {"response": "..."}}` | `message.response` |
67
+ | `{"data": {"text": "..."}}` | `data.text` |
68
+ | `{"result": "..."}` | `result` |
69
+ | `{"output": {"content": {"text": "..."}}}` | `output.content.text` |
70
+
71
+ ### Model Settings
72
+
73
+ Standard OpenAI-compatible parameters: temperature, max tokens, top P, frequency/presence penalty, response format, timeout, max retries.
74
+
75
+ ## License
76
+
77
+ Apache-2.0
@@ -7,4 +7,4 @@
7
7
  * For more information, please refer to: https://www.nocobase.com/agreement.
8
8
  */
9
9
 
10
- !function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t(require("react"),require("@nocobase/plugin-ai/client"),require("@nocobase/client"),require("@nocobase/utils/client"),require("antd"),require("react-i18next")):"function"==typeof define&&define.amd?define("plugin-custom-llm",["react","@nocobase/plugin-ai/client","@nocobase/client","@nocobase/utils/client","antd","react-i18next"],t):"object"==typeof exports?exports["plugin-custom-llm"]=t(require("react"),require("@nocobase/plugin-ai/client"),require("@nocobase/client"),require("@nocobase/utils/client"),require("antd"),require("react-i18next")):e["plugin-custom-llm"]=t(e.react,e["@nocobase/plugin-ai/client"],e["@nocobase/client"],e["@nocobase/utils/client"],e.antd,e["react-i18next"])}(self,function(e,t,n,o,r,i){return function(){"use strict";var u={772:function(e){e.exports=n},645:function(e){e.exports=t},584:function(e){e.exports=o},721:function(e){e.exports=r},156:function(t){t.exports=e},238:function(e){e.exports=i}},c={};function a(e){var t=c[e];if(void 0!==t)return t.exports;var n=c[e]={exports:{}};return u[e](n,n.exports,a),n.exports}a.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return a.d(t,{a:t}),t},a.d=function(e,t){for(var n in t)a.o(t,n)&&!a.o(e,n)&&Object.defineProperty(e,n,{enumerable:!0,get:t[n]})},a.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},a.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})};var l={};return!function(){a.r(l),a.d(l,{PluginCustomLLMClient:function(){return g},default:function(){return S}});var e=a(772),t=a(156),n=a.n(t),o=a(584),r=a(238),i="@nocobase/plugin-custom-llm",u=a(721),c=a(645),p=function(){var t=(0,r.useTranslation)(i,{nsMode:"fallback"}).t;return 
n().createElement("div",{style:{marginBottom:24}},n().createElement(u.Collapse,{bordered:!1,size:"small",items:[{key:"options",label:t("Options"),forceRender:!0,children:n().createElement(e.SchemaComponent,{schema:{type:"void",name:"custom-llm",properties:{temperature:{title:(0,o.tval)("Temperature",{ns:i}),type:"number","x-decorator":"FormItem","x-component":"InputNumber",default:.7,"x-component-props":{step:.1,min:0,max:2}},maxCompletionTokens:{title:(0,o.tval)("Max completion tokens",{ns:i}),type:"number","x-decorator":"FormItem","x-component":"InputNumber",default:-1},topP:{title:(0,o.tval)("Top P",{ns:i}),type:"number","x-decorator":"FormItem","x-component":"InputNumber",default:1,"x-component-props":{step:.1,min:0,max:1}},frequencyPenalty:{title:(0,o.tval)("Frequency penalty",{ns:i}),type:"number","x-decorator":"FormItem","x-component":"InputNumber",default:0,"x-component-props":{step:.1,min:-2,max:2}},presencePenalty:{title:(0,o.tval)("Presence penalty",{ns:i}),type:"number","x-decorator":"FormItem","x-component":"InputNumber",default:0,"x-component-props":{step:.1,min:-2,max:2}},responseFormat:{title:(0,o.tval)("Response format",{ns:i}),type:"string","x-decorator":"FormItem","x-component":"Select",enum:[{label:t("Text"),value:"text"},{label:t("JSON"),value:"json_object"}],default:"text"},timeout:{title:(0,o.tval)("Timeout (ms)",{ns:i}),type:"number","x-decorator":"FormItem","x-component":"InputNumber",default:6e4},maxRetries:{title:(0,o.tval)("Max retries",{ns:i}),type:"number","x-decorator":"FormItem","x-component":"InputNumber",default:1}}}})}]}))},s={components:{ProviderSettingsForm:function(){return n().createElement(e.SchemaComponent,{schema:{type:"void",properties:{baseURL:{title:(0,o.tval)("Base URL",{ns:i}),type:"string",required:!0,"x-decorator":"FormItem","x-component":"TextAreaWithGlobalScope","x-component-props":{placeholder:"https://your-llm-server.com/v1"}},apiKey:{title:(0,o.tval)("API 
Key",{ns:i}),type:"string",required:!0,"x-decorator":"FormItem","x-component":"TextAreaWithGlobalScope"},requestConfig:{title:(0,o.tval)("Request config (JSON)",{ns:i}),type:"string","x-decorator":"FormItem","x-component":"Input.TextArea","x-component-props":{placeholder:JSON.stringify({extraHeaders:{},extraBody:{},modelKwargs:{}},null,2),rows:6,style:{fontFamily:"monospace",fontSize:12}},description:(0,o.tval)("Request config description",{ns:i})},responseConfig:{title:(0,o.tval)("Response config (JSON)",{ns:i}),type:"string","x-decorator":"FormItem","x-component":"Input.TextArea","x-component-props":{placeholder:JSON.stringify({contentPath:"auto",reasoningKey:"reasoning_content"},null,2),rows:4,style:{fontFamily:"monospace",fontSize:12}},description:(0,o.tval)("Response config description",{ns:i})}}}})},ModelSettingsForm:function(){return n().createElement(e.SchemaComponent,{components:{Options:p,ModelSelect:c.ModelSelect},schema:{type:"void",properties:{model:{title:(0,o.tval)("Model",{ns:i}),type:"string",required:!0,"x-decorator":"FormItem","x-component":"ModelSelect"},options:{type:"void","x-component":"Options"}}}})}}};function f(e,t,n,o,r,i,u){try{var c=e[i](u),a=c.value}catch(e){n(e);return}c.done?t(a):Promise.resolve(a).then(o,r)}function m(e){return function(){var t=this,n=arguments;return new Promise(function(o,r){var i=e.apply(t,n);function u(e){f(i,o,r,u,c,"next",e)}function c(e){f(i,o,r,u,c,"throw",e)}u(void 0)})}}function d(e,t,n){return(d=v()?Reflect.construct:function(e,t,n){var o=[null];o.push.apply(o,t);var r=new(Function.bind.apply(e,o));return n&&b(r,n.prototype),r}).apply(null,arguments)}function y(e){return(y=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}function b(e,t){return(b=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e})(e,t)}function x(e){var t="function"==typeof Map?new Map:void 0;return(x=function(e){if(null===e||-1===Function.toString.call(e).indexOf("[native 
code]"))return e;if("function"!=typeof e)throw TypeError("Super expression must either be null or a function");if(void 0!==t){if(t.has(e))return t.get(e);t.set(e,n)}function n(){return d(e,arguments,y(this).constructor)}return n.prototype=Object.create(e.prototype,{constructor:{value:n,enumerable:!1,writable:!0,configurable:!0}}),b(n,e)})(e)}function v(){try{var e=!Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){}))}catch(e){}return(v=function(){return!!e})()}function h(e,t){var n,o,r,i,u={label:0,sent:function(){if(1&r[0])throw r[1];return r[1]},trys:[],ops:[]};return i={next:c(0),throw:c(1),return:c(2)},"function"==typeof Symbol&&(i[Symbol.iterator]=function(){return this}),i;function c(i){return function(c){var a=[i,c];if(n)throw TypeError("Generator is already executing.");for(;u;)try{if(n=1,o&&(r=2&a[0]?o.return:a[0]?o.throw||((r=o.return)&&r.call(o),0):o.next)&&!(r=r.call(o,a[1])).done)return r;switch(o=0,r&&(a=[2&a[0],r.value]),a[0]){case 0:case 1:r=a;break;case 4:return u.label++,{value:a[1],done:!1};case 5:u.label++,o=a[1],a=[0];continue;case 7:a=u.ops.pop(),u.trys.pop();continue;default:if(!(r=(r=u.trys).length>0&&r[r.length-1])&&(6===a[0]||2===a[0])){u=0;continue}if(3===a[0]&&(!r||a[1]>r[0]&&a[1]<r[3])){u.label=a[1];break}if(6===a[0]&&u.label<r[1]){u.label=r[1],r=a;break}if(r&&u.label<r[2]){u.label=r[2],u.ops.push(a);break}r[2]&&u.ops.pop(),u.trys.pop();continue}a=t.call(e,u)}catch(e){a=[6,e],o=0}finally{n=r=0}if(5&a[0])throw a[1];return{value:a[0]?a[1]:void 0,done:!0}}}}var g=function(e){var t;if("function"!=typeof e&&null!==e)throw TypeError("Super expression must either be null or a function");function n(){var e,t;if(!(this instanceof n))throw TypeError("Cannot call a class as a function");return e=n,t=arguments,e=y(e),function(e,t){var n;if(t&&("object"==((n=t)&&"undefined"!=typeof Symbol&&n.constructor===Symbol?"symbol":typeof n)||"function"==typeof t))return t;if(void 0===e)throw ReferenceError("this hasn't been initialised - 
super() hasn't been called");return e}(this,v()?Reflect.construct(e,t||[],y(this).constructor):e.apply(this,t))}return n.prototype=Object.create(e&&e.prototype,{constructor:{value:n,writable:!0,configurable:!0}}),e&&b(n,e),t=[{key:"afterAdd",value:function(){return m(function(){return h(this,function(e){return[2]})})()}},{key:"beforeLoad",value:function(){return m(function(){return h(this,function(e){return[2]})})()}},{key:"load",value:function(){var e=this;return m(function(){return h(this,function(t){return e.aiPlugin.aiManager.registerLLMProvider("custom-llm",s),[2]})})()}},{key:"aiPlugin",get:function(){return this.app.pm.get("ai")}}],function(e,t){for(var n=0;n<t.length;n++){var o=t[n];o.enumerable=o.enumerable||!1,o.configurable=!0,"value"in o&&(o.writable=!0),Object.defineProperty(e,o.key,o)}}(n.prototype,t),n}(x(e.Plugin)),S=g}(),l}()});
10
+ !function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t(require("react"),require("@nocobase/plugin-ai/client"),require("@nocobase/client"),require("@nocobase/utils/client"),require("antd"),require("react-i18next")):"function"==typeof define&&define.amd?define("plugin-custom-llm",["react","@nocobase/plugin-ai/client","@nocobase/client","@nocobase/utils/client","antd","react-i18next"],t):"object"==typeof exports?exports["plugin-custom-llm"]=t(require("react"),require("@nocobase/plugin-ai/client"),require("@nocobase/client"),require("@nocobase/utils/client"),require("antd"),require("react-i18next")):e["plugin-custom-llm"]=t(e.react,e["@nocobase/plugin-ai/client"],e["@nocobase/client"],e["@nocobase/utils/client"],e.antd,e["react-i18next"])}(self,function(e,t,n,o,r,i){return function(){"use strict";var u={772:function(e){e.exports=n},645:function(e){e.exports=t},584:function(e){e.exports=o},721:function(e){e.exports=r},156:function(t){t.exports=e},238:function(e){e.exports=i}},c={};function a(e){var t=c[e];if(void 0!==t)return t.exports;var n=c[e]={exports:{}};return u[e](n,n.exports,a),n.exports}a.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return a.d(t,{a:t}),t},a.d=function(e,t){for(var n in t)a.o(t,n)&&!a.o(e,n)&&Object.defineProperty(e,n,{enumerable:!0,get:t[n]})},a.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},a.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})};var l={};return!function(){a.r(l),a.d(l,{PluginCustomLLMClient:function(){return h},default:function(){return S}});var e=a(772),t=a(156),n=a.n(t),o=a(584),r=a(238),i="@nocobase/plugin-custom-llm",u=a(721),c=a(645),p=function(){var t=(0,r.useTranslation)(i,{nsMode:"fallback"}).t;return 
n().createElement("div",{style:{marginBottom:24}},n().createElement(u.Collapse,{bordered:!1,size:"small",items:[{key:"options",label:t("Options"),forceRender:!0,children:n().createElement(e.SchemaComponent,{schema:{type:"void",name:"custom-llm",properties:{temperature:{title:(0,o.tval)("Temperature",{ns:i}),type:"number","x-decorator":"FormItem","x-component":"InputNumber",default:.7,"x-component-props":{step:.1,min:0,max:2}},maxCompletionTokens:{title:(0,o.tval)("Max completion tokens",{ns:i}),type:"number","x-decorator":"FormItem","x-component":"InputNumber",default:-1},topP:{title:(0,o.tval)("Top P",{ns:i}),type:"number","x-decorator":"FormItem","x-component":"InputNumber",default:1,"x-component-props":{step:.1,min:0,max:1}},frequencyPenalty:{title:(0,o.tval)("Frequency penalty",{ns:i}),type:"number","x-decorator":"FormItem","x-component":"InputNumber",default:0,"x-component-props":{step:.1,min:-2,max:2}},presencePenalty:{title:(0,o.tval)("Presence penalty",{ns:i}),type:"number","x-decorator":"FormItem","x-component":"InputNumber",default:0,"x-component-props":{step:.1,min:-2,max:2}},responseFormat:{title:(0,o.tval)("Response format",{ns:i}),type:"string","x-decorator":"FormItem","x-component":"Select",enum:[{label:t("Text"),value:"text"},{label:t("JSON"),value:"json_object"}],default:"text"},timeout:{title:(0,o.tval)("Timeout (ms)",{ns:i}),type:"number","x-decorator":"FormItem","x-component":"InputNumber",default:6e4},maxRetries:{title:(0,o.tval)("Max retries",{ns:i}),type:"number","x-decorator":"FormItem","x-component":"InputNumber",default:1}}}})}]}))},s={components:{ProviderSettingsForm:function(){return n().createElement(e.SchemaComponent,{schema:{type:"void",properties:{baseURL:{title:(0,o.tval)("Base URL",{ns:i}),type:"string",required:!0,"x-decorator":"FormItem","x-component":"TextAreaWithGlobalScope","x-component-props":{placeholder:"https://your-llm-server.com/v1"}},apiKey:{title:(0,o.tval)("API 
Key",{ns:i}),type:"string",required:!0,"x-decorator":"FormItem","x-component":"TextAreaWithGlobalScope"},requestConfig:{title:(0,o.tval)("Request config (JSON)",{ns:i}),type:"string","x-decorator":"FormItem","x-component":"Input.TextArea","x-component-props":{placeholder:JSON.stringify({extraHeaders:{},extraBody:{},modelKwargs:{}},null,2),rows:6,style:{fontFamily:"monospace",fontSize:12}},description:(0,o.tval)("Request config description",{ns:i})},responseConfig:{title:(0,o.tval)("Response config (JSON)",{ns:i}),type:"string","x-decorator":"FormItem","x-component":"Input.TextArea","x-component-props":{placeholder:JSON.stringify({contentPath:"auto",reasoningKey:"reasoning_content",responseMapping:{content:"message.response"}},null,2),rows:8,style:{fontFamily:"monospace",fontSize:12}},description:(0,o.tval)("Response config description",{ns:i})}}}})},ModelSettingsForm:function(){return n().createElement(e.SchemaComponent,{components:{Options:p,ModelSelect:c.ModelSelect},schema:{type:"void",properties:{model:{title:(0,o.tval)("Model",{ns:i}),type:"string",required:!0,"x-decorator":"FormItem","x-component":"ModelSelect"},options:{type:"void","x-component":"Options"}}}})}}};function f(e,t,n,o,r,i,u){try{var c=e[i](u),a=c.value}catch(e){n(e);return}c.done?t(a):Promise.resolve(a).then(o,r)}function m(e){return function(){var t=this,n=arguments;return new Promise(function(o,r){var i=e.apply(t,n);function u(e){f(i,o,r,u,c,"next",e)}function c(e){f(i,o,r,u,c,"throw",e)}u(void 0)})}}function d(e,t,n){return(d=v()?Reflect.construct:function(e,t,n){var o=[null];o.push.apply(o,t);var r=new(Function.bind.apply(e,o));return n&&b(r,n.prototype),r}).apply(null,arguments)}function y(e){return(y=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}function b(e,t){return(b=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e})(e,t)}function x(e){var t="function"==typeof Map?new Map:void 
0;return(x=function(e){if(null===e||-1===Function.toString.call(e).indexOf("[native code]"))return e;if("function"!=typeof e)throw TypeError("Super expression must either be null or a function");if(void 0!==t){if(t.has(e))return t.get(e);t.set(e,n)}function n(){return d(e,arguments,y(this).constructor)}return n.prototype=Object.create(e.prototype,{constructor:{value:n,enumerable:!1,writable:!0,configurable:!0}}),b(n,e)})(e)}function v(){try{var e=!Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){}))}catch(e){}return(v=function(){return!!e})()}function g(e,t){var n,o,r,i,u={label:0,sent:function(){if(1&r[0])throw r[1];return r[1]},trys:[],ops:[]};return i={next:c(0),throw:c(1),return:c(2)},"function"==typeof Symbol&&(i[Symbol.iterator]=function(){return this}),i;function c(i){return function(c){var a=[i,c];if(n)throw TypeError("Generator is already executing.");for(;u;)try{if(n=1,o&&(r=2&a[0]?o.return:a[0]?o.throw||((r=o.return)&&r.call(o),0):o.next)&&!(r=r.call(o,a[1])).done)return r;switch(o=0,r&&(a=[2&a[0],r.value]),a[0]){case 0:case 1:r=a;break;case 4:return u.label++,{value:a[1],done:!1};case 5:u.label++,o=a[1],a=[0];continue;case 7:a=u.ops.pop(),u.trys.pop();continue;default:if(!(r=(r=u.trys).length>0&&r[r.length-1])&&(6===a[0]||2===a[0])){u=0;continue}if(3===a[0]&&(!r||a[1]>r[0]&&a[1]<r[3])){u.label=a[1];break}if(6===a[0]&&u.label<r[1]){u.label=r[1],r=a;break}if(r&&u.label<r[2]){u.label=r[2],u.ops.push(a);break}r[2]&&u.ops.pop(),u.trys.pop();continue}a=t.call(e,u)}catch(e){a=[6,e],o=0}finally{n=r=0}if(5&a[0])throw a[1];return{value:a[0]?a[1]:void 0,done:!0}}}}var h=function(e){var t;if("function"!=typeof e&&null!==e)throw TypeError("Super expression must either be null or a function");function n(){var e,t;if(!(this instanceof n))throw TypeError("Cannot call a class as a function");return e=n,t=arguments,e=y(e),function(e,t){var n;if(t&&("object"==((n=t)&&"undefined"!=typeof Symbol&&n.constructor===Symbol?"symbol":typeof 
n)||"function"==typeof t))return t;if(void 0===e)throw ReferenceError("this hasn't been initialised - super() hasn't been called");return e}(this,v()?Reflect.construct(e,t||[],y(this).constructor):e.apply(this,t))}return n.prototype=Object.create(e&&e.prototype,{constructor:{value:n,writable:!0,configurable:!0}}),e&&b(n,e),t=[{key:"afterAdd",value:function(){return m(function(){return g(this,function(e){return[2]})})()}},{key:"beforeLoad",value:function(){return m(function(){return g(this,function(e){return[2]})})()}},{key:"load",value:function(){var e=this;return m(function(){return g(this,function(t){return e.aiPlugin.aiManager.registerLLMProvider("custom-llm",s),[2]})})()}},{key:"aiPlugin",get:function(){return this.app.pm.get("ai")}}],function(e,t){for(var n=0;n<t.length;n++){var o=t[n];o.enumerable=o.enumerable||!1,o.configurable=!0,"value"in o&&(o.writable=!0),Object.defineProperty(e,o.key,o)}}(n.prototype,t),n}(x(e.Plugin)),S=h}(),l}()});
@@ -16,5 +16,5 @@
16
16
  "Request config (JSON)": "Request config (JSON)",
17
17
  "Request config description": "Extra configuration for LLM requests. Supported keys: extraHeaders (custom HTTP headers), extraBody (extra request body fields), modelKwargs (LangChain model kwargs).",
18
18
  "Response config (JSON)": "Response config (JSON)",
19
- "Response config description": "Configure response parsing. contentPath: 'auto' (default) or dot-path like '0.text'. reasoningKey: key name for reasoning content (default: 'reasoning_content')."
19
+ "Response config description": "Configure response parsing. contentPath: 'auto' or dot-path. reasoningKey: key for reasoning content. responseMapping: { content: 'dot.path' } — maps non-standard LLM response to OpenAI format (e.g., 'message.response')."
20
20
  }
@@ -16,5 +16,5 @@
16
16
  "Request config (JSON)": "Cấu hình request (JSON)",
17
17
  "Request config description": "Cấu hình bổ sung cho request LLM. Các key hỗ trợ: extraHeaders (HTTP headers tùy chỉnh), extraBody (thêm trường vào request body), modelKwargs (tham số model LangChain).",
18
18
  "Response config (JSON)": "Cấu hình response (JSON)",
19
- "Response config description": "Cấu hình cách parse response. contentPath: 'auto' (mặc định) hoặc dot-path như '0.text'. reasoningKey: tên key cho reasoning content (mặc định: 'reasoning_content')."
19
+ "Response config description": "Cấu hình parse response. contentPath: 'auto' hoặc dot-path. reasoningKey: key reasoning. responseMapping: { content: 'dot.path' } mapping response không chuẩn OpenAI (ví dụ: 'message.response')."
20
20
  }
@@ -74,9 +74,7 @@ function extractTextContent(content, contentPath) {
74
74
  } catch {
75
75
  }
76
76
  }
77
- if (typeof content === "string") {
78
- return content;
79
- }
77
+ if (typeof content === "string") return content;
80
78
  if (Array.isArray(content)) {
81
79
  return content.filter((block) => block && block.type === "text").map((block) => block.text ?? "").join("");
82
80
  }
@@ -93,6 +91,122 @@ function safeParseJSON(str) {
93
91
  return {};
94
92
  }
95
93
  }
94
+ function getByPath(obj, dotPath) {
95
+ if (!obj || !dotPath) return void 0;
96
+ const keys = dotPath.split(".");
97
+ let current = obj;
98
+ for (const key of keys) {
99
+ if (current == null) return void 0;
100
+ current = current[key];
101
+ }
102
+ return current;
103
+ }
104
+ function createMappingFetch(responseMapping) {
105
+ const contentPath = responseMapping.content;
106
+ if (!contentPath) return void 0;
107
+ return async (url, init) => {
108
+ var _a, _b;
109
+ const response = await fetch(url, init);
110
+ if (!response.ok) return response;
111
+ const contentType = response.headers.get("content-type") || "";
112
+ if (contentType.includes("text/event-stream") || ((_a = init == null ? void 0 : init.headers) == null ? void 0 : _a["Accept"]) === "text/event-stream") {
113
+ const reader = (_b = response.body) == null ? void 0 : _b.getReader();
114
+ if (!reader) return response;
115
+ const stream = new ReadableStream({
116
+ async start(controller) {
117
+ const decoder = new TextDecoder();
118
+ const encoder = new TextEncoder();
119
+ let buffer = "";
120
+ try {
121
+ while (true) {
122
+ const { done, value } = await reader.read();
123
+ if (done) {
124
+ controller.close();
125
+ break;
126
+ }
127
+ buffer += decoder.decode(value, { stream: true });
128
+ const lines = buffer.split("\n");
129
+ buffer = lines.pop() || "";
130
+ for (const line of lines) {
131
+ if (line.startsWith("data: ")) {
132
+ const data = line.slice(6).trim();
133
+ if (data === "[DONE]") {
134
+ controller.enqueue(encoder.encode("data: [DONE]\n\n"));
135
+ continue;
136
+ }
137
+ try {
138
+ const parsed = JSON.parse(data);
139
+ const mappedContent = getByPath(parsed, contentPath);
140
+ if (mappedContent !== void 0) {
141
+ const mapped = {
142
+ id: getByPath(parsed, responseMapping.id || "id") || "chatcmpl-custom",
143
+ object: "chat.completion.chunk",
144
+ created: Math.floor(Date.now() / 1e3),
145
+ model: "custom",
146
+ choices: [{
147
+ index: 0,
148
+ delta: { content: String(mappedContent), role: "assistant" },
149
+ finish_reason: null
150
+ }]
151
+ };
152
+ controller.enqueue(encoder.encode(`data: ${JSON.stringify(mapped)}
153
+
154
+ `));
155
+ } else {
156
+ controller.enqueue(encoder.encode(line + "\n"));
157
+ }
158
+ } catch {
159
+ controller.enqueue(encoder.encode(line + "\n"));
160
+ }
161
+ } else {
162
+ controller.enqueue(encoder.encode(line + "\n"));
163
+ }
164
+ }
165
+ }
166
+ } catch (err) {
167
+ controller.error(err);
168
+ }
169
+ }
170
+ });
171
+ return new Response(stream, {
172
+ status: response.status,
173
+ statusText: response.statusText,
174
+ headers: new Headers({
175
+ "content-type": "text/event-stream"
176
+ })
177
+ });
178
+ }
179
+ if (contentType.includes("application/json")) {
180
+ const body = await response.json();
181
+ const mappedContent = getByPath(body, contentPath);
182
+ if (mappedContent !== void 0) {
183
+ const mapped = {
184
+ id: getByPath(body, responseMapping.id || "id") || "chatcmpl-custom",
185
+ object: "chat.completion",
186
+ created: Math.floor(Date.now() / 1e3),
187
+ model: "custom",
188
+ choices: [{
189
+ index: 0,
190
+ message: {
191
+ role: getByPath(body, responseMapping.role || "") || "assistant",
192
+ content: String(mappedContent)
193
+ },
194
+ finish_reason: "stop"
195
+ }],
196
+ usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 }
197
+ };
198
+ return new Response(JSON.stringify(mapped), {
199
+ status: response.status,
200
+ statusText: response.statusText,
201
+ headers: new Headers({
202
+ "content-type": "application/json"
203
+ })
204
+ });
205
+ }
206
+ }
207
+ return response;
208
+ };
209
+ }
96
210
  class CustomLLMProvider extends import_plugin_ai.LLMProvider {
97
211
  get baseURL() {
98
212
  return null;
@@ -109,6 +223,7 @@ class CustomLLMProvider extends import_plugin_ai.LLMProvider {
109
223
  const { baseURL, apiKey } = this.serviceOptions || {};
110
224
  const { responseFormat } = this.modelOptions || {};
111
225
  const reqConfig = this.requestConfig;
226
+ const resConfig = this.responseConfig;
112
227
  const responseFormatOptions = {
113
228
  type: responseFormat ?? "text"
114
229
  };
@@ -132,19 +247,16 @@ class CustomLLMProvider extends import_plugin_ai.LLMProvider {
132
247
  if (reqConfig.extraHeaders && typeof reqConfig.extraHeaders === "object") {
133
248
  config.configuration.defaultHeaders = reqConfig.extraHeaders;
134
249
  }
250
+ if (resConfig.responseMapping) {
251
+ config.configuration.fetch = createMappingFetch(resConfig.responseMapping);
252
+ }
135
253
  return new ChatOpenAI(config);
136
254
  }
137
- /**
138
- * Handle streaming chunks — normalize content format using responseConfig.
139
- */
140
255
  parseResponseChunk(chunk) {
141
256
  const resConfig = this.responseConfig;
142
257
  const text = extractTextContent(chunk, resConfig.contentPath);
143
258
  return stripToolCallTags(text);
144
259
  }
145
- /**
146
- * Handle saved messages — normalize array content when loading from DB.
147
- */
148
260
  parseResponseMessage(message) {
149
261
  const { content: rawContent, messageId, metadata, role, toolCalls, attachments, workContext } = message;
150
262
  const content = {
@@ -170,19 +282,13 @@ class CustomLLMProvider extends import_plugin_ai.LLMProvider {
170
282
  role
171
283
  };
172
284
  }
173
- /**
174
- * Parse reasoning content using responseConfig.reasoningKey.
175
- */
176
285
  parseReasoningContent(chunk) {
177
286
  var _a;
178
287
  const resConfig = this.responseConfig;
179
288
  const reasoningKey = resConfig.reasoningKey || "reasoning_content";
180
289
  const reasoning = (_a = chunk == null ? void 0 : chunk.additional_kwargs) == null ? void 0 : _a[reasoningKey];
181
290
  if (reasoning && typeof reasoning === "string") {
182
- return {
183
- status: "streaming",
184
- content: reasoning
185
- };
291
+ return { status: "streaming", content: reasoning };
186
292
  }
187
293
  return null;
188
294
  }
package/package.json CHANGED
@@ -3,7 +3,7 @@
3
3
  "displayName": "AI LLM: Custom (OpenAI Compatible)",
4
4
  "displayName.zh-CN": "AI LLM:自定义(OpenAI 兼容)",
5
5
  "description": "OpenAI-compatible LLM provider with auto response format detection for external LLM services.",
6
- "version": "1.0.0",
6
+ "version": "1.0.1",
7
7
  "main": "dist/server/index.js",
8
8
  "nocobase": {
9
9
  "supportedVersions": [
Binary file
Binary file