plugin-custom-llm 1.2.1 → 1.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client/index.js +9 -0
- package/dist/externalVersion.js +16 -7
- package/dist/index.js +9 -0
- package/dist/server/index.js +9 -0
- package/dist/server/llm-providers/custom-llm.js +157 -5
- package/dist/server/plugin.js +9 -0
- package/dist/swagger.js +39 -0
- package/package.json +9 -1
- package/src/client/client.d.ts +249 -0
- package/src/client/index.tsx +19 -0
- package/src/client/llm-providers/custom-llm/ModelSettings.tsx +139 -0
- package/src/client/llm-providers/custom-llm/ProviderSettings.tsx +115 -0
- package/src/client/llm-providers/custom-llm/index.ts +10 -0
- package/src/client/locale.ts +8 -0
- package/{dist/client/models/index.d.ts → src/client/models/index.ts} +12 -10
- package/src/client/plugin.tsx +10 -0
- package/{dist/index.d.ts → src/index.ts} +2 -2
- package/src/locale/en-US.json +29 -0
- package/src/locale/vi-VN.json +29 -0
- package/src/locale/zh-CN.json +16 -0
- package/src/server/collections/.gitkeep +0 -0
- package/{dist/server/index.d.ts → src/server/index.ts} +1 -1
- package/src/server/llm-providers/custom-llm.ts +992 -0
- package/src/server/plugin.ts +27 -0
- package/src/swagger.ts +9 -0
- package/dist/client/index.d.ts +0 -8
- package/dist/client/llm-providers/custom-llm/ModelSettings.d.ts +0 -2
- package/dist/client/llm-providers/custom-llm/ProviderSettings.d.ts +0 -2
- package/dist/client/llm-providers/custom-llm/index.d.ts +0 -2
- package/dist/client/locale.d.ts +0 -2
- package/dist/client/plugin.d.ts +0 -5
- package/dist/server/llm-providers/custom-llm.d.ts +0 -54
- package/dist/server/plugin.d.ts +0 -12
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import { Plugin } from '@nocobase/server';
|
|
2
|
+
import PluginAIServer from '@nocobase/plugin-ai';
|
|
3
|
+
import { customLLMProviderOptions } from './llm-providers/custom-llm';
|
|
4
|
+
|
|
5
|
+
export class PluginCustomLLMServer extends Plugin {
|
|
6
|
+
async afterAdd() {}
|
|
7
|
+
|
|
8
|
+
async beforeLoad() {}
|
|
9
|
+
|
|
10
|
+
async load() {
|
|
11
|
+
this.aiPlugin.aiManager.registerLLMProvider('custom-llm', customLLMProviderOptions);
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
async install() {}
|
|
15
|
+
|
|
16
|
+
async afterEnable() {}
|
|
17
|
+
|
|
18
|
+
async afterDisable() {}
|
|
19
|
+
|
|
20
|
+
async remove() {}
|
|
21
|
+
|
|
22
|
+
private get aiPlugin(): PluginAIServer {
|
|
23
|
+
return this.app.pm.get('ai');
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
export default PluginCustomLLMServer;
|
package/src/swagger.ts
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
export default {
|
|
2
|
+
info: {
|
|
3
|
+
title: 'NocoBase API - Custom LLM Plugin',
|
|
4
|
+
description:
|
|
5
|
+
'Registers a custom OpenAI-compatible LLM provider with the AI plugin. This plugin has no direct HTTP endpoints — all API access is through the AI API plugin gateway (`/api/ai-llm/v1/*`) using models registered under the `custom-llm` service.',
|
|
6
|
+
},
|
|
7
|
+
tags: [{ name: 'custom-llm', description: 'Custom LLM provider (no direct endpoints)' }],
|
|
8
|
+
paths: {},
|
|
9
|
+
};
|
package/dist/client/index.d.ts
DELETED
package/dist/client/locale.d.ts
DELETED
package/dist/client/plugin.d.ts
DELETED
|
@@ -1,54 +0,0 @@
|
|
|
1
|
-
import { LLMProvider, LLMProviderMeta } from '@nocobase/plugin-ai';
import { Model } from '@nocobase/database';
import { Context } from '@nocobase/actions';
import type { ParsedAttachmentResult } from '@nocobase/plugin-ai';
/**
 * Declarations for the OpenAI-compatible LLM provider registered under the
 * `custom-llm` service name (see customLLMProviderOptions below).
 */
export declare class CustomLLMProvider extends LLMProvider {
    /** Endpoint base URL — presumably read from provider settings; confirm in the implementation. */
    get baseURL(): any;
    private get requestConfig();
    private get responseConfig();
    /** Builds the underlying model client used for requests. */
    createModel(): any;
    /** Extracts content from one response chunk, or null when the chunk carries none. */
    parseResponseChunk(chunk: any): string | null;
    /** Maps a stored message record into the `{ key, content, role }` shape. */
    parseResponseMessage(message: Model): {
        key: Model;
        content: Record<string, any>;
        role: Model;
    };
    /** Pulls reasoning ("thinking") content from a chunk, if present. */
    parseReasoningContent(chunk: any): {
        status: string;
        content: string;
    } | null;
    /**
     * Extract response metadata from LLM output for post-save enrichment.
     * Sanitizes overly long message IDs from Gemini or other providers.
     */
    parseResponseMetadata(output: any): any;
    /** Normalizes a provider error into the shape expected by callers. */
    parseResponseError(err: any): any;
    /**
     * Self-contained file reading that correctly handles the APP_PUBLIC_PATH prefix.
     *
     * plugin-ai's encodeLocalFile does path.join(cwd, url) without stripping
     * APP_PUBLIC_PATH, so when the app is deployed under a sub-path (e.g. /my-app)
     * the resolved path becomes '{cwd}/my-app/storage/uploads/…' which does not exist.
     * We cannot fix that in plugin-ai (core), so we re-implement file reading here
     * with the prefix stripped before the cwd join.
     */
    private readFileAsBase64;
    /**
     * Override parseAttachment to convert all attachments into formats that
     * generic OpenAI-compatible endpoints actually support:
     *
     * - Images → image_url block with base64 data URI (vision models)
     * - Text files → text block with decoded UTF-8 content
     * - Binary → text block with base64 data URI (multi-modal or fallback)
     *
     * The base-class implementation returns a LangChain ContentBlock.Multimodal.File
     * (`type: 'file'`) for non-image attachments. LangChain serialises this as the
     * newer OpenAI Files API format which most custom/local endpoints do NOT understand,
     * causing file content to be silently dropped.
     *
     * This method is entirely self-contained — it does not call super — so it is
     * safe to use without modifying plugin-ai core.
     */
    parseAttachment(ctx: Context, attachment: any): Promise<ParsedAttachmentResult>;
}
/** Registration metadata for the `custom-llm` provider. */
export declare const customLLMProviderOptions: LLMProviderMeta;
|
package/dist/server/plugin.d.ts
DELETED
|
@@ -1,12 +0,0 @@
|
|
|
1
|
-
import { Plugin } from '@nocobase/server';
/**
 * Declarations for the server-side plugin class. The hooks mirror the
 * NocoBase Plugin lifecycle; provider registration happens in load()
 * (see the implementation in src/server/plugin.ts).
 */
export declare class PluginCustomLLMServer extends Plugin {
    afterAdd(): Promise<void>;
    beforeLoad(): Promise<void>;
    load(): Promise<void>;
    install(): Promise<void>;
    afterEnable(): Promise<void>;
    afterDisable(): Promise<void>;
    remove(): Promise<void>;
    private get aiPlugin();
}
export default PluginCustomLLMServer;
|